summaryrefslogtreecommitdiff
path: root/vendor/github.com/go-swagger
diff options
context:
space:
mode:
Diffstat (limited to 'vendor/github.com/go-swagger')
-rw-r--r--vendor/github.com/go-swagger/go-swagger/LICENSE202
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/.gitignore5
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff.go145
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/array_diff.go106
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/checks.go266
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/compatibility.go111
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difference_location.go22
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difftypes.go337
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/node.go82
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/reporting.go118
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/schema.go126
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_analyser.go759
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_difference.go216
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/type_adapters.go163
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/expand.go81
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/flatten.go48
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate.go29
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/cli.go26
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/client.go86
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/contrib.go17
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/markdown.go33
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/model.go98
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/operation.go104
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/server.go119
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/shared.go240
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_nonwin.go19
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_win.go8
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec.go125
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec_go111.go119
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/support.go67
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd.go13
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd/spec.go111
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/mixin.go117
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/serve.go117
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/validate.go83
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/version.go37
-rw-r--r--vendor/github.com/go-swagger/go-swagger/cmd/swagger/swagger.go143
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/README.md3
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/application.go674
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/doc.go6
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/enum.go32
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/meta.go252
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/operations.go170
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/parameters.go518
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/parser.go1667
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers.go51
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers_go118.go42
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/regexprs.go96
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/responses.go454
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/route_params.go263
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/routes.go93
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/schema.go1155
-rw-r--r--vendor/github.com/go-swagger/go-swagger/codescan/spec.go258
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/.gitignore1
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/bindata.go40
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/client.go120
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/config.go61
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/debug.go64
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/discriminators.go75
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/doc.go78
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/formats.go226
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/genopts_nonwin.go50
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/genopts_win.go12
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/language.go440
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/media.go191
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/model.go2118
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/operation.go1303
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/shared.go1096
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/spec.go273
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/structs.go803
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/support.go546
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/template_repo.go855
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/cli/cli.gotmpl242
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/cli/completion.gotmpl77
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/cli/main.gotmpl28
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/cli/modelcli.gotmpl25
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/cli/operation.gotmpl230
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/cli/registerflag.gotmpl97
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/cli/retrieveflag.gotmpl59
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/cli/schema.gotmpl193
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/client/client.gotmpl127
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/client/facade.gotmpl129
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/client/parameter.gotmpl406
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/client/response.gotmpl346
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/README.md311
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/client.gotmpl111
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/facade.gotmpl83
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/configureapi.gotmpl222
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/server.gotmpl9
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/docstring.gotmpl25
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/header.gotmpl20
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/markdown/docs.gotmpl527
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/model.gotmpl27
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/schema.gotmpl131
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/schemabody.gotmpl330
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/schemaembedded.gotmpl21
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/schemapolymorphic.gotmpl53
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/schematype.gotmpl29
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/schemavalidator.gotmpl1194
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/additionalpropertiesserializer.gotmpl94
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/aliasedserializer.gotmpl11
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/allofserializer.gotmpl180
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/basetypeserializer.gotmpl69
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/marshalbinaryserializer.gotmpl19
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/schemaserializer.gotmpl15
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/subtypeserializer.gotmpl172
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/tupleserializer.gotmpl66
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/server/autoconfigureapi.gotmpl205
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/server/builder.gotmpl446
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/server/configureapi.gotmpl167
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/server/doc.gotmpl63
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/server/main.gotmpl186
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/server/operation.gotmpl92
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/server/parameter.gotmpl720
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/server/responses.gotmpl271
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/server/server.gotmpl660
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/server/urlbuilder.gotmpl213
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsinit.gotmpl23
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsvar.gotmpl29
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/structfield.gotmpl41
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/swagger_json_embed.gotmpl30
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/validation/customformat.gotmpl3
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/validation/maximum.gotmpl23
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/validation/minimum.gotmpl23
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/validation/multipleOf.gotmpl23
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/validation/primitive.gotmpl29
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/templates/validation/structfield.gotmpl62
-rw-r--r--vendor/github.com/go-swagger/go-swagger/generator/types.go1284
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/README.md3
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/classifier.go166
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/doc.go89
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/enum.go84
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/meta.go246
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/operations.go85
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/parameters.go515
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/path.go151
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/responses.go453
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/route_params.go253
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/routes.go146
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/scanner.go974
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/schema.go1358
-rw-r--r--vendor/github.com/go-swagger/go-swagger/scan/validators.go829
142 files changed, 34207 insertions, 0 deletions
diff --git a/vendor/github.com/go-swagger/go-swagger/LICENSE b/vendor/github.com/go-swagger/go-swagger/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/.gitignore b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/.gitignore
new file mode 100644
index 000000000..60607586b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/.gitignore
@@ -0,0 +1,5 @@
+swagger
+swagger.json
+models
+operations
+cmd
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff.go
new file mode 100644
index 000000000..5999f4948
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff.go
@@ -0,0 +1,145 @@
+package commands
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "log"
+ "os"
+
+ "errors"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-swagger/go-swagger/cmd/swagger/commands/diff"
+)
+
+// JSONFormat for json
+const JSONFormat = "json"
+
+// DiffCommand is a command that generates the diff of two swagger specs.
+//
+// There are no specific options for this expansion.
+type DiffCommand struct {
+ OnlyBreakingChanges bool `long:"break" short:"b" description:"When present, only shows incompatible changes"`
+ Format string `long:"format" short:"f" description:"When present, writes output as json" default:"txt" choice:"txt" choice:"json"`
+ IgnoreFile string `long:"ignore" short:"i" description:"Exception file of diffs to ignore (copy output from json diff format)" default:"none specified"`
+ Destination string `long:"dest" short:"d" description:"Output destination file or stdout" default:"stdout"`
+ Args struct {
+ OldSpec string `positional-arg-name:"{old spec}"`
+ NewSpec string `positional-arg-name:"{new spec}"`
+ } `required:"2" positional-args:"specs" description:"Input specs to be diff-ed"`
+}
+
+// Execute diffs the two specs provided
+func (c *DiffCommand) Execute(_ []string) error {
+ if c.Args.OldSpec == "" || c.Args.NewSpec == "" {
+ return errors.New(`missing arguments for diff command (use --help for more info)`)
+ }
+
+ c.printInfo()
+
+ var (
+ output io.WriteCloser
+ err error
+ )
+ if c.Destination != "stdout" {
+ output, err = os.OpenFile(c.Destination, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0600)
+ if err != nil {
+ return fmt.Errorf("%s: %w", c.Destination, err)
+ }
+ defer func() {
+ _ = output.Close()
+ }()
+ } else {
+ output = os.Stdout
+ }
+
+ diffs, err := c.getDiffs()
+ if err != nil {
+ return err
+ }
+
+ ignores, err := c.readIgnores()
+ if err != nil {
+ return err
+ }
+
+ diffs = diffs.FilterIgnores(ignores)
+ if len(ignores) > 0 {
+ log.Printf("Diff Report Ignored Items from IgnoreFile")
+ for _, eachItem := range ignores {
+ log.Printf("%s", eachItem.String())
+ }
+ }
+
+ var (
+ input io.Reader
+ warn error
+ )
+ if c.Format != JSONFormat && c.OnlyBreakingChanges {
+ input, err, warn = diffs.ReportCompatibility()
+ } else {
+ input, err, warn = diffs.ReportAllDiffs(c.Format == JSONFormat)
+ }
+ if err != nil {
+ return err
+ }
+ _, err = io.Copy(output, input)
+ if err != nil {
+ return err
+ }
+ return warn
+}
+
+func (c *DiffCommand) readIgnores() (diff.SpecDifferences, error) {
+ ignoreFile := c.IgnoreFile
+ ignoreDiffs := diff.SpecDifferences{}
+
+ if ignoreFile == "none specified" || ignoreFile == "" {
+ return ignoreDiffs, nil
+ }
+ // Open our jsonFile
+ jsonFile, err := os.Open(ignoreFile)
+ if err != nil {
+ return nil, fmt.Errorf("%s: %w", ignoreFile, err)
+ }
+ defer func() {
+ _ = jsonFile.Close()
+ }()
+ byteValue, err := io.ReadAll(jsonFile)
+ if err != nil {
+ return nil, fmt.Errorf("reading %s: %w", ignoreFile, err)
+ }
+ err = json.Unmarshal(byteValue, &ignoreDiffs)
+ if err != nil {
+ return nil, err
+ }
+ return ignoreDiffs, nil
+}
+
+func (c *DiffCommand) getDiffs() (diff.SpecDifferences, error) {
+ oldSpecPath, newSpecPath := c.Args.OldSpec, c.Args.NewSpec
+ swaggerDoc1 := oldSpecPath
+ specDoc1, err := loads.Spec(swaggerDoc1)
+ if err != nil {
+ return nil, err
+ }
+
+ swaggerDoc2 := newSpecPath
+ specDoc2, err := loads.Spec(swaggerDoc2)
+ if err != nil {
+ return nil, err
+ }
+
+ return diff.Compare(specDoc1.Spec(), specDoc2.Spec())
+}
+
+func (c *DiffCommand) printInfo() {
+ log.Println("Run Config:")
+ log.Printf("Spec1: %s", c.Args.OldSpec)
+ log.Printf("Spec2: %s", c.Args.NewSpec)
+ log.Printf("ReportOnlyBreakingChanges (-c) :%v", c.OnlyBreakingChanges)
+ log.Printf("OutputFormat (-f) :%s", c.Format)
+ log.Printf("IgnoreFile (-i) :%s", c.IgnoreFile)
+ log.Printf("Diff Report Destination (-d) :%s", c.Destination)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/array_diff.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/array_diff.go
new file mode 100644
index 000000000..6e2fef3bc
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/array_diff.go
@@ -0,0 +1,106 @@
+package diff
+
+// This is a simple DSL for diffing arrays
+
+// fromArrayStruct utility struct to encompass diffing of string arrays
+type fromArrayStruct struct {
+ from []string
+}
+
+// fromStringArray starts a fluent diff expression
+func fromStringArray(from []string) fromArrayStruct {
+ return fromArrayStruct{from}
+}
+
+// DiffsTo completes a fluent diff expression
+func (f fromArrayStruct) DiffsTo(toArray []string) (added, deleted, common []string) {
+ inFrom := 1
+ inTo := 2
+
+ if f.from == nil {
+ return toArray, []string{}, []string{}
+ }
+
+ m := make(map[string]int, len(toArray))
+ added = make([]string, 0, len(toArray))
+ deleted = make([]string, 0, len(f.from))
+ common = make([]string, 0, len(f.from))
+
+ for _, item := range f.from {
+ m[item] = inFrom
+ }
+
+ for _, item := range toArray {
+ if _, ok := m[item]; ok {
+ m[item] |= inTo
+ } else {
+ m[item] = inTo
+ }
+ }
+ for key, val := range m {
+ switch val {
+ case inFrom:
+ deleted = append(deleted, key)
+ case inTo:
+ added = append(added, key)
+ default:
+ common = append(common, key)
+ }
+ }
+ return
+}
+
+// fromMapStruct utility struct to encompass diffing of string arrays
+type fromMapStruct struct {
+ srcMap map[string]interface{}
+}
+
+// fromStringMap starts a comparison by declaring a source map
+func fromStringMap(srcMap map[string]interface{}) fromMapStruct {
+ return fromMapStruct{srcMap}
+}
+
+// Pair stores a pair of items which share a key in two maps
+type Pair struct {
+ First interface{}
+ Second interface{}
+}
+
+// DiffsTo - generates diffs for a comparison
+func (f fromMapStruct) DiffsTo(destMap map[string]interface{}) (added, deleted, common map[string]interface{}) {
+ added = make(map[string]interface{})
+ deleted = make(map[string]interface{})
+ common = make(map[string]interface{})
+
+ inSrc := 1
+ inDest := 2
+
+ m := make(map[string]int)
+
+ // enter values for all items in the source array
+ for key := range f.srcMap {
+ m[key] = inSrc
+ }
+
+ // now either set or 'boolean or' a new flag if in the second collection
+ for key := range destMap {
+ if _, ok := m[key]; ok {
+ m[key] |= inDest
+ } else {
+ m[key] = inDest
+ }
+ }
+ // finally inspect the values and generate the left,right and shared collections
+ // for the shared items, store both values in case there's a diff
+ for key, val := range m {
+ switch val {
+ case inSrc:
+ deleted[key] = f.srcMap[key]
+ case inDest:
+ added[key] = destMap[key]
+ default:
+ common[key] = Pair{f.srcMap[key], destMap[key]}
+ }
+ }
+ return added, deleted, common
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/checks.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/checks.go
new file mode 100644
index 000000000..2ae1b8227
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/checks.go
@@ -0,0 +1,266 @@
+package diff
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+// CompareEnums returns added, deleted enum values between two enum lists,
+// comparing the values by their fmt "%v" rendering.
+func CompareEnums(left, right []interface{}) []TypeDiff {
+	toStrings := func(vals []interface{}) []string {
+		strs := make([]string, 0, len(vals))
+		for _, v := range vals {
+			strs = append(strs, fmt.Sprintf("%v", v))
+		}
+		return strs
+	}
+
+	added, deleted, _ := fromStringArray(toStrings(left)).DiffsTo(toStrings(right))
+
+	diffs := []TypeDiff{}
+	if len(added) > 0 {
+		diffs = append(diffs, TypeDiff{Change: AddedEnumValue, Description: strings.Join(added, ",")})
+	}
+	if len(deleted) > 0 {
+		diffs = append(diffs, TypeDiff{Change: DeletedEnumValue, Description: strings.Join(deleted, ",")})
+	}
+
+	return diffs
+}
+
+// CompareProperties recursive property comparison.
+// It resolves the two schemas' properties (following refs via the supplied
+// SchemaFromRefFn callbacks), records deleted properties and required-flag
+// changes, recurses into shared properties through cmp, and finally records
+// added properties. Returns the accumulated list of differences.
+func CompareProperties(location DifferenceLocation, schema1 *spec.Schema, schema2 *spec.Schema, getRefFn1 SchemaFromRefFn, getRefFn2 SchemaFromRefFn, cmp CompareSchemaFn) []SpecDifference {
+	propDiffs := []SpecDifference{}
+
+	// nothing to compare when neither side declares properties
+	if schema1.Properties == nil && schema2.Properties == nil {
+		return propDiffs
+	}
+
+	schema1Props := propertiesFor(schema1, getRefFn1)
+	schema2Props := propertiesFor(schema2, getRefFn2)
+	// find deleted and changed properties
+
+	for eachProp1Name, eachProp1 := range schema1Props {
+		eachProp1 := eachProp1 // copy the loop variable; its address/fields escape below
+		childLoc := addChildDiffNode(location, eachProp1Name, eachProp1.Schema)
+
+		if eachProp2, ok := schema2Props[eachProp1Name]; ok {
+			// property exists on both sides: report required-flag changes ...
+			diffs := CheckToFromRequired(eachProp1.Required, eachProp2.Required)
+			if len(diffs) > 0 {
+				for _, diff := range diffs {
+					propDiffs = append(propDiffs, SpecDifference{DifferenceLocation: childLoc, Code: diff.Change})
+				}
+			}
+			// ... then recurse into the property's schema
+			cmp(childLoc, eachProp1.Schema, eachProp2.Schema)
+		} else {
+			propDiffs = append(propDiffs, SpecDifference{DifferenceLocation: childLoc, Code: DeletedProperty})
+		}
+	}
+
+	// find added properties
+	// NOTE(review): this loop reads schema2.Properties/schema1.Properties
+	// directly rather than the ref-resolved schema1Props/schema2Props used
+	// above — confirm refs cannot hide added properties here
+	for eachProp2Name, eachProp2 := range schema2.Properties {
+		eachProp2 := eachProp2 // copy: &eachProp2 is taken below
+		if _, ok := schema1.Properties[eachProp2Name]; !ok {
+			childLoc := addChildDiffNode(location, eachProp2Name, &eachProp2)
+			propDiffs = append(propDiffs, SpecDifference{DifferenceLocation: childLoc, Code: AddedProperty})
+		}
+	}
+	return propDiffs
+
+}
+
+// CompareFloatValues compares a float data item.
+// When both values are set, a larger new value yields ifGreaterCode and a
+// smaller one ifLessCode; when exactly one side is set, the constraint was
+// added or deleted.
+func CompareFloatValues(fieldName string, val1 *float64, val2 *float64, ifGreaterCode SpecChangeCode, ifLessCode SpecChangeCode) []TypeDiff {
+	diffs := []TypeDiff{}
+	switch {
+	case val1 != nil && val2 != nil:
+		switch {
+		case *val2 > *val1:
+			diffs = append(diffs, TypeDiff{Change: ifGreaterCode, Description: fmt.Sprintf("%s %f->%f", fieldName, *val1, *val2)})
+		case *val2 < *val1:
+			diffs = append(diffs, TypeDiff{Change: ifLessCode, Description: fmt.Sprintf("%s %f->%f", fieldName, *val1, *val2)})
+		}
+	case val1 != nil:
+		// constraint removed in the new spec
+		diffs = append(diffs, TypeDiff{Change: DeletedConstraint, Description: fmt.Sprintf("%s(%f)", fieldName, *val1)})
+	case val2 != nil:
+		// constraint added in the new spec
+		diffs = append(diffs, TypeDiff{Change: AddedConstraint, Description: fmt.Sprintf("%s(%f)", fieldName, *val2)})
+	}
+	return diffs
+}
+
+// CompareIntValues compares two int data items.
+// When both values are set, a larger new value yields ifGreaterCode and a
+// smaller one ifLessCode; when exactly one side is set, the constraint was
+// added or deleted.
+func CompareIntValues(fieldName string, val1 *int64, val2 *int64, ifGreaterCode SpecChangeCode, ifLessCode SpecChangeCode) []TypeDiff {
+	diffs := []TypeDiff{}
+	switch {
+	case val1 != nil && val2 != nil:
+		switch {
+		case *val2 > *val1:
+			diffs = append(diffs, TypeDiff{Change: ifGreaterCode, Description: fmt.Sprintf("%s %d->%d", fieldName, *val1, *val2)})
+		case *val2 < *val1:
+			diffs = append(diffs, TypeDiff{Change: ifLessCode, Description: fmt.Sprintf("%s %d->%d", fieldName, *val1, *val2)})
+		}
+	case val1 != nil:
+		// constraint removed in the new spec
+		diffs = append(diffs, TypeDiff{Change: DeletedConstraint, Description: fmt.Sprintf("%s(%d)", fieldName, *val1)})
+	case val2 != nil:
+		// constraint added in the new spec
+		diffs = append(diffs, TypeDiff{Change: AddedConstraint, Description: fmt.Sprintf("%s(%d)", fieldName, *val2)})
+	}
+	return diffs
+}
+
+// CheckToFromPrimitiveType check for diff to or from a primitive.
+// A change between primitive and non-primitive (either direction) is
+// reported as a ChangedType diff; otherwise diffs is returned untouched.
+func CheckToFromPrimitiveType(diffs []TypeDiff, type1, type2 interface{}) []TypeDiff {
+	if isPrimitive(type1) == isPrimitive(type2) {
+		return diffs
+	}
+
+	// Primitive to Obj or Obj to Primitive
+	fromName, fromIsArray := getSchemaType(type1)
+	toName, toIsArray := getSchemaType(type2)
+	return addTypeDiff(diffs, TypeDiff{
+		Change:   ChangedType,
+		FromType: formatTypeString(fromName, fromIsArray),
+		ToType:   formatTypeString(toName, toIsArray),
+	})
+}
+
+// CheckRefChange has the property ref changed.
+// Two refs pointing at different definitions are a RefTargetChanged diff;
+// a ref on only one side is a plain ChangedType diff.
+func CheckRefChange(diffs []TypeDiff, type1, type2 interface{}) (diffReturn []TypeDiff) {
+	diffReturn = diffs
+	isRef1, isRef2 := isRefType(type1), isRefType(type2)
+	switch {
+	case isRef1 && isRef2:
+		// both refs but to different objects (TODO detect renamed object)
+		if definitionFromRef(getRef(type1)) != definitionFromRef(getRef(type2)) {
+			diffReturn = addTypeDiff(diffReturn, TypeDiff{Change: RefTargetChanged, FromType: getSchemaTypeStr(type1), ToType: getSchemaTypeStr(type2)})
+		}
+	case isRef1 != isRef2:
+		diffReturn = addTypeDiff(diffReturn, TypeDiff{Change: ChangedType, FromType: getSchemaTypeStr(type1), ToType: getSchemaTypeStr(type2)})
+	}
+	return
+}
+
+// checkNumericTypeChanges checks for changes to or from a numeric type.
+// Exclusive-bound flag flips are reported first (removing a bound widens,
+// adding one narrows); only when no flag changed are the Maximum/Minimum
+// values themselves compared (a raised maximum or lowered minimum widens,
+// the opposite narrows).
+func checkNumericTypeChanges(diffs []TypeDiff, type1, type2 *spec.SchemaProps) []TypeDiff {
+	// Number
+	_, type1IsNumeric := numberWideness[type1.Type[0]]
+	_, type2IsNumeric := numberWideness[type2.Type[0]]
+
+	if type1IsNumeric && type2IsNumeric {
+		foundDiff := false
+		if type1.ExclusiveMaximum && !type2.ExclusiveMaximum {
+			diffs = addTypeDiff(diffs, TypeDiff{Change: WidenedType, Description: fmt.Sprintf("Exclusive Maximum Removed:%v->%v", type1.ExclusiveMaximum, type2.ExclusiveMaximum)})
+			foundDiff = true
+		}
+		if !type1.ExclusiveMaximum && type2.ExclusiveMaximum {
+			diffs = addTypeDiff(diffs, TypeDiff{Change: NarrowedType, Description: fmt.Sprintf("Exclusive Maximum Added:%v->%v", type1.ExclusiveMaximum, type2.ExclusiveMaximum)})
+			foundDiff = true
+		}
+		if type1.ExclusiveMinimum && !type2.ExclusiveMinimum {
+			// BUG FIX: this description previously printed the ExclusiveMaximum
+			// flags instead of the ExclusiveMinimum ones (copy-paste error)
+			diffs = addTypeDiff(diffs, TypeDiff{Change: WidenedType, Description: fmt.Sprintf("Exclusive Minimum Removed:%v->%v", type1.ExclusiveMinimum, type2.ExclusiveMinimum)})
+			foundDiff = true
+		}
+		if !type1.ExclusiveMinimum && type2.ExclusiveMinimum {
+			diffs = addTypeDiff(diffs, TypeDiff{Change: NarrowedType, Description: fmt.Sprintf("Exclusive Minimum Added:%v->%v", type1.ExclusiveMinimum, type2.ExclusiveMinimum)})
+			foundDiff = true
+		}
+		if !foundDiff {
+			maxDiffs := CompareFloatValues("Maximum", type1.Maximum, type2.Maximum, WidenedType, NarrowedType)
+			diffs = append(diffs, maxDiffs...)
+			minDiffs := CompareFloatValues("Minimum", type1.Minimum, type2.Minimum, NarrowedType, WidenedType)
+			diffs = append(diffs, minDiffs...)
+		}
+	}
+	return diffs
+}
+
+// CheckStringTypeChanges checks for changes to or from a string type:
+// MinLength/MaxLength constraint moves, pattern changes and enum changes.
+// Both sides must already be string-typed; otherwise diffs is returned
+// unchanged.
+func CheckStringTypeChanges(diffs []TypeDiff, type1, type2 *spec.SchemaProps) []TypeDiff {
+	// string changes
+	if type1.Type[0] == StringType &&
+		type2.Type[0] == StringType {
+		minLengthDiffs := CompareIntValues("MinLength", type1.MinLength, type2.MinLength, NarrowedType, WidenedType)
+		diffs = append(diffs, minLengthDiffs...)
+		// BUG FIX: the MaxLength comparison previously passed the MinLength
+		// fields of both schemas, so MaxLength changes were never reported
+		maxLengthDiffs := CompareIntValues("MaxLength", type1.MaxLength, type2.MaxLength, WidenedType, NarrowedType)
+		diffs = append(diffs, maxLengthDiffs...)
+		if type1.Pattern != type2.Pattern {
+			diffs = addTypeDiff(diffs, TypeDiff{Change: ChangedType, Description: fmt.Sprintf("Pattern Changed:%s->%s", type1.Pattern, type2.Pattern)})
+		}
+		// (a redundant inner StringType re-check was removed; it is implied
+		// by the enclosing condition)
+		if len(type1.Enum) > 0 {
+			enumDiffs := CompareEnums(type1.Enum, type2.Enum)
+			diffs = append(diffs, enumDiffs...)
+		}
+	}
+	return diffs
+}
+
+// CheckToFromRequired checks for changes to or from a required property.
+// It reports at most one diff: the direction of the required-flag change.
+func CheckToFromRequired(required1, required2 bool) (diffs []TypeDiff) {
+	if required1 == required2 {
+		return diffs
+	}
+	if required1 {
+		// was required, is now optional
+		return addTypeDiff(diffs, TypeDiff{Change: ChangedRequiredToOptional})
+	}
+	// was optional, is now required
+	return addTypeDiff(diffs, TypeDiff{Change: ChangedOptionalToRequired})
+}
+
+const objType = "object"
+
+// getTypeHierarchyChange classifies a rename between two type names as
+// widened, narrowed, compatible or plain changed, using string-ness first
+// and then relative numeric wideness. Empty names are rendered as "object".
+func getTypeHierarchyChange(type1, type2 string) TypeDiff {
+	fromType, toType := type1, type2
+	if fromType == "" {
+		fromType = objType
+	}
+	if toType == "" {
+		toType = objType
+	}
+	description := fmt.Sprintf("%s -> %s", fromType, toType)
+
+	// string -> non-string narrows, non-string -> string widens
+	string1, string2 := isStringType(type1), isStringType(type2)
+	if string1 && !string2 {
+		return TypeDiff{Change: NarrowedType, Description: description}
+	}
+	if !string1 && string2 {
+		return TypeDiff{Change: WidenedType, Description: description}
+	}
+
+	// numeric -> numeric: compare relative wideness
+	wideness1, numeric1 := numberWideness[type1]
+	wideness2, numeric2 := numberWideness[type2]
+	if numeric1 && numeric2 {
+		switch {
+		case wideness1 == wideness2:
+			return TypeDiff{Change: ChangedToCompatibleType, Description: description}
+		case wideness1 > wideness2:
+			return TypeDiff{Change: NarrowedType, Description: description}
+		default:
+			return TypeDiff{Change: WidenedType, Description: description}
+		}
+	}
+	return TypeDiff{Change: ChangedType, Description: description}
+}
+
+// isRefType reports whether the item carries a non-empty $ref.
+func isRefType(item interface{}) bool {
+	switch s := item.(type) {
+	case spec.Refable:
+		return s.Ref.String() != ""
+	case *spec.Schema:
+		return s.Ref.String() != ""
+	case *spec.SchemaProps:
+		return s.Ref.String() != ""
+	default:
+		// *spec.SimpleSchema and any other type cannot carry a ref
+		return false
+	}
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/compatibility.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/compatibility.go
new file mode 100644
index 000000000..d31c0e63a
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/compatibility.go
@@ -0,0 +1,111 @@
+package diff
+
+// CompatibilityPolicy decides which changes are breaking and which are not.
+// ForChange holds direction-independent ratings; ForRequest/ForResponse
+// hold ratings that depend on whether the change sits in a request or a
+// response (see getCompatibilityForChange).
+type CompatibilityPolicy struct {
+	ForResponse map[SpecChangeCode]Compatibility
+	ForRequest  map[SpecChangeCode]Compatibility
+	ForChange   map[SpecChangeCode]Compatibility
+}
+
+// compatibility is the package-wide policy table, populated once in init.
+var compatibility CompatibilityPolicy
+
+func init() {
+	compatibility = CompatibilityPolicy{
+		// ratings for changes occurring inside a response
+		ForResponse: map[SpecChangeCode]Compatibility{
+			AddedRequiredProperty:   Breaking,
+			DeletedProperty:         Breaking,
+			AddedProperty:           NonBreaking,
+			DeletedResponse:         Breaking,
+			AddedResponse:           NonBreaking,
+			WidenedType:             NonBreaking,
+			NarrowedType:            NonBreaking,
+			ChangedType:             Breaking,
+			ChangedToCompatibleType: NonBreaking,
+			AddedEnumValue:          Breaking,
+			DeletedEnumValue:        NonBreaking,
+			AddedResponseHeader:     NonBreaking,
+			ChangedResponseHeader:   Breaking,
+			DeletedResponseHeader:   Breaking,
+			ChangedDescripton:       NonBreaking,
+			AddedDescripton:         NonBreaking,
+			DeletedDescripton:       NonBreaking,
+			ChangedTag:              NonBreaking,
+			AddedTag:                NonBreaking,
+			DeletedTag:              NonBreaking,
+			DeletedConstraint:       Breaking,
+			AddedConstraint:         NonBreaking,
+			DeletedExtension:        Warning,
+			AddedExtension:          Warning,
+		},
+		// ratings for changes occurring inside a request
+		ForRequest: map[SpecChangeCode]Compatibility{
+			AddedRequiredProperty:     Breaking,
+			DeletedProperty:           Breaking,
+			AddedProperty:             Breaking,
+			AddedOptionalParam:        NonBreaking,
+			AddedRequiredParam:        Breaking,
+			DeletedOptionalParam:      NonBreaking,
+			DeletedRequiredParam:      NonBreaking,
+			WidenedType:               NonBreaking,
+			NarrowedType:              Breaking,
+			ChangedType:               Breaking,
+			ChangedToCompatibleType:   NonBreaking,
+			ChangedOptionalToRequired: Breaking,
+			ChangedRequiredToOptional: NonBreaking,
+			AddedEnumValue:            NonBreaking,
+			DeletedEnumValue:          Breaking,
+			ChangedDescripton:         NonBreaking,
+			AddedDescripton:           NonBreaking,
+			DeletedDescripton:         NonBreaking,
+			ChangedTag:                NonBreaking,
+			AddedTag:                  NonBreaking,
+			DeletedTag:                NonBreaking,
+			DeletedConstraint:         NonBreaking,
+			AddedConstraint:           Breaking,
+			ChangedDefault:            Warning,
+			AddedDefault:              Warning,
+			DeletedDefault:            Warning,
+			ChangedExample:            NonBreaking,
+			AddedExample:              NonBreaking,
+			DeletedExample:            NonBreaking,
+			ChangedCollectionFormat:   Breaking,
+			DeletedExtension:          Warning,
+			AddedExtension:            Warning,
+		},
+		// ratings that do not depend on request/response direction;
+		// these take precedence in getCompatibilityForChange
+		ForChange: map[SpecChangeCode]Compatibility{
+			NoChangeDetected:          NonBreaking,
+			AddedEndpoint:             NonBreaking,
+			DeletedEndpoint:           Breaking,
+			DeletedDeprecatedEndpoint: NonBreaking,
+			AddedConsumesFormat:       NonBreaking,
+			DeletedConsumesFormat:     Breaking,
+			AddedProducesFormat:       NonBreaking,
+			DeletedProducesFormat:     Breaking,
+			AddedSchemes:              NonBreaking,
+			DeletedSchemes:            Breaking,
+			ChangedHostURL:            Breaking,
+			ChangedBasePath:           Breaking,
+			ChangedDescripton:         NonBreaking,
+			AddedDescripton:           NonBreaking,
+			DeletedDescripton:         NonBreaking,
+			ChangedTag:                NonBreaking,
+			AddedTag:                  NonBreaking,
+			DeletedTag:                NonBreaking,
+			RefTargetChanged:          Breaking,
+			RefTargetRenamed:          NonBreaking,
+			AddedDefinition:           NonBreaking,
+			DeletedDefinition:         NonBreaking,
+			DeletedExtension:          Warning,
+			AddedExtension:            Warning,
+		},
+	}
+}
+
+// getCompatibilityForChange resolves the compatibility rating of a change
+// code: direction-independent ratings (ForChange) take precedence, then the
+// request or response table matching the given direction is consulted.
+func getCompatibilityForChange(diffCode SpecChangeCode, where DataDirection) Compatibility {
+	if compat, ok := compatibility.ForChange[diffCode]; ok {
+		return compat
+	}
+	byDirection := compatibility.ForResponse
+	if where == Request {
+		byDirection = compatibility.ForRequest
+	}
+	return byDirection[diffCode]
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difference_location.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difference_location.go
new file mode 100644
index 000000000..3bd700b53
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difference_location.go
@@ -0,0 +1,22 @@
+package diff
+
+// DifferenceLocation indicates where the difference occurs
+type DifferenceLocation struct {
+ URL string `json:"url"`
+ Method string `json:"method,omitempty"`
+ Response int `json:"response,omitempty"`
+ Node *Node `json:"node,omitempty"`
+}
+
+// AddNode returns a copy of this location with the leaf node added
+func (dl DifferenceLocation) AddNode(node *Node) DifferenceLocation {
+ newLoc := dl
+
+ if newLoc.Node != nil {
+ newLoc.Node = newLoc.Node.Copy()
+ newLoc.Node.AddLeafNode(node)
+ } else {
+ newLoc.Node = node
+ }
+ return newLoc
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difftypes.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difftypes.go
new file mode 100644
index 000000000..007862fb9
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difftypes.go
@@ -0,0 +1,337 @@
+package diff
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "log"
+)
+
+// SpecChangeCode enumerates the various types of diffs from one spec to another.
+// The numeric values come from iota, so new codes must be appended at the
+// end to keep existing values stable.
+type SpecChangeCode int
+
+const (
+	// NoChangeDetected - the specs have no changes
+	NoChangeDetected SpecChangeCode = iota
+	// DeletedProperty - A message property has been deleted in the new spec
+	DeletedProperty
+	// AddedProperty - A message property has been added in the new spec
+	AddedProperty
+	// AddedRequiredProperty - A required message property has been added in the new spec
+	AddedRequiredProperty
+	// DeletedOptionalParam - An endpoint parameter has been deleted in the new spec
+	DeletedOptionalParam
+	// ChangedDescripton - Changed a description
+	ChangedDescripton
+	// AddedDescripton - Added a description
+	AddedDescripton
+	// DeletedDescripton - Deleted a description
+	DeletedDescripton
+	// ChangedTag - Changed a tag
+	ChangedTag
+	// AddedTag - Added a tag
+	AddedTag
+	// DeletedTag - Deleted a tag
+	DeletedTag
+	// DeletedResponse - An endpoint response has been deleted in the new spec
+	DeletedResponse
+	// DeletedEndpoint - An endpoint has been deleted in the new spec
+	DeletedEndpoint
+	// DeletedDeprecatedEndpoint - A deprecated endpoint has been deleted in the new spec
+	DeletedDeprecatedEndpoint
+	// AddedRequiredParam - A required parameter has been added in the new spec
+	AddedRequiredParam
+	// DeletedRequiredParam - A required parameter has been deleted in the new spec
+	DeletedRequiredParam
+	// AddedEndpoint - An endpoint has been added in the new spec
+	AddedEndpoint
+	// WidenedType - A type has been changed to a more permissive type eg int->string
+	WidenedType
+	// NarrowedType - A type has been changed to a less permissive type eg string->int
+	NarrowedType
+	// ChangedToCompatibleType - A type has been changed to a compatible type eg password->string
+	ChangedToCompatibleType
+	// ChangedType - A type has been changed to a type whose relative compatibility cannot be determined
+	ChangedType
+	// AddedEnumValue - An enum type has had a new potential value added to it
+	AddedEnumValue
+	// DeletedEnumValue - An enum type has had an existing value removed from it
+	DeletedEnumValue
+	// AddedOptionalParam - A new optional parameter has been added to the new spec
+	AddedOptionalParam
+	// ChangedOptionalToRequired - An optional parameter is now required in the new spec
+	ChangedOptionalToRequired
+	// ChangedRequiredToOptional - A required parameter is now optional in the new spec
+	ChangedRequiredToOptional
+	// AddedResponse - An endpoint has a new response code in the new spec
+	AddedResponse
+	// AddedConsumesFormat - a new consumes format (json/xml/yaml etc) has been added in the new spec
+	AddedConsumesFormat
+	// DeletedConsumesFormat - an existing format has been removed in the new spec
+	DeletedConsumesFormat
+	// AddedProducesFormat - a new produces format (json/xml/yaml etc) has been added in the new spec
+	AddedProducesFormat
+	// DeletedProducesFormat - an existing produces format has been removed in the new spec
+	DeletedProducesFormat
+	// AddedSchemes - a new scheme has been added to the new spec
+	AddedSchemes
+	// DeletedSchemes - a scheme has been removed from the new spec
+	DeletedSchemes
+	// ChangedHostURL - the host url has been changed. If this is used in the client generation, then clients will break.
+	ChangedHostURL
+	// ChangedBasePath - the host base path has been changed. If this is used in the client generation, then clients will break.
+	ChangedBasePath
+	// AddedResponseHeader Added a header Item
+	AddedResponseHeader
+	// ChangedResponseHeader Changed a header Item
+	ChangedResponseHeader
+	// DeletedResponseHeader Deleted a header Item
+	DeletedResponseHeader
+	// RefTargetChanged Changed a ref to point to a different object
+	RefTargetChanged
+	// RefTargetRenamed Renamed a ref to point to the same object
+	RefTargetRenamed
+	// DeletedConstraint Deleted a schema constraint
+	DeletedConstraint
+	// AddedConstraint Added a schema constraint
+	AddedConstraint
+	// DeletedDefinition removed one of the definitions
+	DeletedDefinition
+	// AddedDefinition added one of the definitions
+	AddedDefinition
+	// ChangedDefault - Changed default value
+	ChangedDefault
+	// AddedDefault - Added a default value
+	AddedDefault
+	// DeletedDefault - Deleted a default value
+	DeletedDefault
+	// ChangedExample - Changed an example value
+	ChangedExample
+	// AddedExample - Added an example value
+	AddedExample
+	// DeletedExample - Deleted an example value
+	DeletedExample
+	// ChangedCollectionFormat - A collectionFormat has been changed to a collectionFormat whose relative compatibility cannot be determined
+	ChangedCollectionFormat
+	// DeletedExtension deleted an extension
+	DeletedExtension
+	// AddedExtension added an extension
+	AddedExtension
+)
+
+// toLongStringSpecChangeCode maps each change code to the human-readable
+// sentence returned by SpecChangeCode.Description.
+var toLongStringSpecChangeCode = map[SpecChangeCode]string{
+	NoChangeDetected:          "No Change detected",
+	AddedEndpoint:             "Added endpoint",
+	DeletedEndpoint:           "Deleted endpoint",
+	DeletedDeprecatedEndpoint: "Deleted a deprecated endpoint",
+	AddedRequiredProperty:     "Added required property",
+	DeletedProperty:           "Deleted property",
+	ChangedDescripton:         "Changed a description",
+	AddedDescripton:           "Added a description",
+	DeletedDescripton:         "Deleted a description",
+	ChangedTag:                "Changed a tag",
+	AddedTag:                  "Added a tag",
+	DeletedTag:                "Deleted a tag",
+	AddedProperty:             "Added property",
+	AddedOptionalParam:        "Added optional param",
+	AddedRequiredParam:        "Added required param",
+	DeletedOptionalParam:      "Deleted optional param",
+	DeletedRequiredParam:      "Deleted required param",
+	DeletedResponse:           "Deleted response",
+	AddedResponse:             "Added response",
+	WidenedType:               "Widened type",
+	NarrowedType:              "Narrowed type",
+	ChangedType:               "Changed type",
+	ChangedToCompatibleType:   "Changed type to equivalent type",
+	ChangedOptionalToRequired: "Changed optional param to required",
+	ChangedRequiredToOptional: "Changed required param to optional",
+	AddedEnumValue:            "Added possible enumeration(s)",
+	DeletedEnumValue:          "Deleted possible enumeration(s)",
+	AddedConsumesFormat:       "Added a consumes format",
+	DeletedConsumesFormat:     "Deleted a consumes format",
+	AddedProducesFormat:       "Added produces format",
+	DeletedProducesFormat:     "Deleted produces format",
+	AddedSchemes:              "Added schemes",
+	DeletedSchemes:            "Deleted schemes",
+	ChangedHostURL:            "Changed host URL",
+	ChangedBasePath:           "Changed base path",
+	AddedResponseHeader:       "Added response header",
+	ChangedResponseHeader:     "Changed response header",
+	DeletedResponseHeader:     "Deleted response header",
+	RefTargetChanged:          "Changed ref to different object",
+	RefTargetRenamed:          "Changed ref to renamed object",
+	DeletedConstraint:         "Deleted a schema constraint",
+	AddedConstraint:           "Added a schema constraint",
+	DeletedDefinition:         "Deleted a schema definition",
+	AddedDefinition:           "Added a schema definition",
+	ChangedDefault:            "Default value is changed",
+	AddedDefault:              "Default value is added",
+	DeletedDefault:            "Default value is removed",
+	ChangedExample:            "Example value is changed",
+	AddedExample:              "Example value is added",
+	DeletedExample:            "Example value is removed",
+	ChangedCollectionFormat:   "Changed collection format",
+	DeletedExtension:          "Deleted Extension",
+	AddedExtension:            "Added Extension",
+}
+
+// toStringSpecChangeCode maps each change code to the stable identifier
+// used for JSON (un)marshalling; toIDSpecChangeCode is its inverse, so the
+// names here must be unique for round-tripping to work.
+var toStringSpecChangeCode = map[SpecChangeCode]string{
+	AddedEndpoint:             "AddedEndpoint",
+	NoChangeDetected:          "NoChangeDetected",
+	DeletedEndpoint:           "DeletedEndpoint",
+	DeletedDeprecatedEndpoint: "DeletedDeprecatedEndpoint",
+	AddedRequiredProperty:     "AddedRequiredProperty",
+	DeletedProperty:           "DeletedProperty",
+	AddedProperty:             "AddedProperty",
+	ChangedDescripton:         "ChangedDescription",
+	AddedDescripton:           "AddedDescription",
+	DeletedDescripton:         "DeletedDescription",
+	ChangedTag:                "ChangedTag",
+	AddedTag:                  "AddedTag",
+	DeletedTag:                "DeletedTag",
+	AddedOptionalParam:        "AddedOptionalParam",
+	AddedRequiredParam:        "AddedRequiredParam",
+	// BUG FIX: DeletedOptionalParam previously serialized as
+	// "DeletedRequiredParam" and DeletedRequiredParam as the prose
+	// "Deleted required param", making the two codes ambiguous and
+	// breaking the JSON round-trip through toIDSpecChangeCode.
+	DeletedOptionalParam:      "DeletedOptionalParam",
+	DeletedRequiredParam:      "DeletedRequiredParam",
+	DeletedResponse:           "DeletedResponse",
+	AddedResponse:             "AddedResponse",
+	WidenedType:               "WidenedType",
+	NarrowedType:              "NarrowedType",
+	ChangedType:               "ChangedType",
+	ChangedToCompatibleType:   "ChangedToCompatibleType",
+	ChangedOptionalToRequired: "ChangedOptionalToRequiredParam",
+	ChangedRequiredToOptional: "ChangedRequiredToOptionalParam",
+	AddedEnumValue:            "AddedEnumValue",
+	DeletedEnumValue:          "DeletedEnumValue",
+	AddedConsumesFormat:       "AddedConsumesFormat",
+	DeletedConsumesFormat:     "DeletedConsumesFormat",
+	AddedProducesFormat:       "AddedProducesFormat",
+	DeletedProducesFormat:     "DeletedProducesFormat",
+	AddedSchemes:              "AddedSchemes",
+	DeletedSchemes:            "DeletedSchemes",
+	ChangedHostURL:            "ChangedHostURL",
+	ChangedBasePath:           "ChangedBasePath",
+	AddedResponseHeader:       "AddedResponseHeader",
+	ChangedResponseHeader:     "ChangedResponseHeader",
+	DeletedResponseHeader:     "DeletedResponseHeader",
+	RefTargetChanged:          "RefTargetChanged",
+	RefTargetRenamed:          "RefTargetRenamed",
+	DeletedConstraint:         "DeletedConstraint",
+	AddedConstraint:           "AddedConstraint",
+	DeletedDefinition:         "DeletedDefinition",
+	AddedDefinition:           "AddedDefinition",
+	ChangedDefault:            "ChangedDefault",
+	AddedDefault:              "AddedDefault",
+	DeletedDefault:            "DeletedDefault",
+	ChangedExample:            "ChangedExample",
+	AddedExample:              "AddedExample",
+	DeletedExample:            "DeletedExample",
+	ChangedCollectionFormat:   "ChangedCollectionFormat",
+	DeletedExtension:          "DeletedExtension",
+	AddedExtension:            "AddedExtension",
+}
+
+// toIDSpecChangeCode is the reverse lookup (name -> code), built in init.
+var toIDSpecChangeCode = map[string]SpecChangeCode{}
+
+// Description returns an english version of this error.
+// Unknown codes are logged and rendered as "UNDEFINED".
+func (s SpecChangeCode) Description() string {
+	if desc, ok := toLongStringSpecChangeCode[s]; ok {
+		return desc
+	}
+	log.Printf("warning: No description for %v", s)
+	return "UNDEFINED"
+}
+
+// MarshalJSON marshals the enum as a quoted json string
+func (s SpecChangeCode) MarshalJSON() ([]byte, error) {
+	name := toStringSpecChangeCode[s]
+	return stringAsQuotedBytes(name)
+}
+
+// UnmarshalJSON unmarshals a quoted json string to the enum value.
+// An unrecognised name is reported to the caller as an error.
+func (s *SpecChangeCode) UnmarshalJSON(b []byte) error {
+	str, err := readStringFromByteStream(b)
+	if err != nil {
+		return err
+	}
+	val, ok := toIDSpecChangeCode[str]
+	if !ok {
+		return fmt.Errorf("unknown enum value. cannot unmarshal '%s'", str)
+	}
+	*s = val
+	return nil
+}
+
+// Compatibility - whether this is a breaking or non-breaking change
+type Compatibility int
+
+const (
+	// Breaking this change could break existing clients
+	Breaking Compatibility = iota
+	// NonBreaking This is a backwards-compatible API change
+	NonBreaking
+	// Warning changes are technically non-breaking but can cause behavior changes in client and thus should be reported differently
+	Warning
+)
+
+// String renders the compatibility as its stable identifier.
+func (s Compatibility) String() string {
+	return toStringCompatibility[s]
+}
+
+// toStringCompatibility maps each rating to the identifier used for JSON.
+var toStringCompatibility = map[Compatibility]string{
+	Breaking:    "Breaking",
+	NonBreaking: "NonBreaking",
+	Warning:     "Warning",
+}
+
+// toIDCompatibility is the reverse lookup (name -> rating), built in init.
+var toIDCompatibility = map[string]Compatibility{}
+
+// MarshalJSON marshals the enum as a quoted json string
+func (s Compatibility) MarshalJSON() ([]byte, error) {
+	return stringAsQuotedBytes(s.String())
+}
+
+// UnmarshalJSON unmarshals a quoted json string to the enum value.
+// An unrecognised name is reported to the caller as an error.
+func (s *Compatibility) UnmarshalJSON(b []byte) error {
+	str, err := readStringFromByteStream(b)
+	if err != nil {
+		return err
+	}
+	val, ok := toIDCompatibility[str]
+	if !ok {
+		return fmt.Errorf("unknown enum value. cannot unmarshal '%s'", str)
+	}
+	*s = val
+	return nil
+}
+
+// stringAsQuotedBytes wraps str in double quotes and returns the bytes.
+// Note it performs no escaping; callers pass identifier-style names only.
+func stringAsQuotedBytes(str string) ([]byte, error) {
+	return []byte(`"` + str + `"`), nil
+}
+
+// readStringFromByteStream decodes a single JSON string from b.
+func readStringFromByteStream(b []byte) (string, error) {
+	var s string
+	if err := json.Unmarshal(b, &s); err != nil {
+		return "", err
+	}
+	return s, nil
+}
+
+// init builds the reverse (name -> id) lookup tables used by UnmarshalJSON.
+func init() {
+	for id, name := range toStringSpecChangeCode {
+		toIDSpecChangeCode[name] = id
+	}
+	for id, name := range toStringCompatibility {
+		toIDCompatibility[name] = id
+	}
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/node.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/node.go
new file mode 100644
index 000000000..e1c7c95f1
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/node.go
@@ -0,0 +1,82 @@
+package diff
+
+import (
+ "fmt"
+
+ "github.com/go-openapi/spec"
+)
+
+// Node is the position of a diff in a spec: a linked list of field names
+// (with optional type info) from the root down to the differing leaf.
+type Node struct {
+	Field     string `json:"name,omitempty"`
+	TypeName  string `json:"type,omitempty"`
+	IsArray   bool   `json:"is_array,omitempty"`
+	ChildNode *Node  `json:"child,omitempty"`
+}
+
+// String std string render: "field<type>" segments joined with dots,
+// arrays rendered as "field<array[type]>".
+func (n *Node) String() string {
+	name := n.Field
+	switch {
+	case n.IsArray:
+		name = fmt.Sprintf("%s<array[%s]>", name, n.TypeName)
+	case len(n.TypeName) > 0:
+		name = fmt.Sprintf("%s<%s>", name, n.TypeName)
+	}
+	if n.ChildNode == nil {
+		return name
+	}
+	return fmt.Sprintf("%s.%s", name, n.ChildNode.String())
+}
+
+// AddLeafNode Adds a Child at the deepest point of the chain, i.e. to the
+// first node found without a child. Returns the receiver for chaining.
+func (n *Node) AddLeafNode(toAdd *Node) *Node {
+	leaf := n
+	for leaf.ChildNode != nil {
+		leaf = leaf.ChildNode
+	}
+	leaf.ChildNode = toAdd
+
+	return n
+}
+
+// Copy deep copy of this node and children
+func (n Node) Copy() *Node {
+	// n is already a value copy of the node; only the child pointer
+	// needs recursive duplication
+	clone := n
+	if clone.ChildNode != nil {
+		clone.ChildNode = clone.ChildNode.Copy()
+	}
+	return &clone
+}
+
+// getSchemaDiffNode builds a diff Node for the named field, deriving the
+// type name and array-ness from whichever schema representation is passed
+// (Refable, Schema, SimpleSchema or SchemaProps, by value or pointer).
+// A nil schema leaves the type information empty.
+func getSchemaDiffNode(name string, schema interface{}) *Node {
+	node := Node{
+		Field: name,
+	}
+	if schema != nil {
+		switch s := schema.(type) {
+		case spec.Refable:
+			node.TypeName, node.IsArray = getSchemaType(s)
+		case *spec.Schema:
+			node.TypeName, node.IsArray = getSchemaType(s.SchemaProps)
+		case spec.SimpleSchema:
+			node.TypeName, node.IsArray = getSchemaType(s)
+		case *spec.SimpleSchema:
+			node.TypeName, node.IsArray = getSchemaType(s)
+		case *spec.SchemaProps:
+			node.TypeName, node.IsArray = getSchemaType(s)
+		case spec.SchemaProps:
+			node.TypeName, node.IsArray = getSchemaType(&s)
+		default:
+			// unsupported representation: record it rather than panic
+			node.TypeName = fmt.Sprintf("Unknown type %v", schema)
+		}
+	}
+	return &node
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/reporting.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/reporting.go
new file mode 100644
index 000000000..7ef627226
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/reporting.go
@@ -0,0 +1,118 @@
+package diff
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "io"
+
+ "github.com/go-openapi/spec"
+)
+
+// ArrayType is the swagger type name for arrays.
+var ArrayType = "array"
+
+// ObjectType is the swagger type name for objects.
+var ObjectType = "object"
+
+// Compare returns the result of analysing breaking and non breaking changes
+// between two Swagger specs.
+func Compare(spec1, spec2 *spec.Swagger) (SpecDifferences, error) {
+	analyser := NewSpecAnalyser()
+	if err := analyser.Analyse(spec1, spec2); err != nil {
+		return nil, err
+	}
+	return analyser.Diffs, nil
+}
+
+// PathItemOp - combines path and operation into a single keyed entity
+type PathItemOp struct {
+	ParentPathItem *spec.PathItem  `json:"pathitem"`  // the path item owning the operation
+	Operation      *spec.Operation `json:"operation"` // the operation itself
+	Extensions     spec.Extensions `json:"extensions"`
+}
+
+// URLMethod - combines url and method into a single keyed entity
+type URLMethod struct {
+	Path   string `json:"path"`
+	Method string `json:"method"`
+}
+
+// DataDirection indicates the direction of change Request vs Response
+type DataDirection int
+
+const (
+	// Request Used for messages/param diffs in a request
+	Request DataDirection = iota
+	// Response Used for messages/param diffs in a response
+	Response
+)
+
+// getParams collects the parameters declared at the given location ("query",
+// "path", ...), merging path-level params with operation params; operation
+// params override path-level ones with the same name.
+func getParams(pathParams, opParams []spec.Parameter, location string) map[string]spec.Parameter {
+	params := map[string]spec.Parameter{}
+	// shared path params first, then operation params so they win on conflict
+	for _, group := range [][]spec.Parameter{pathParams, opParams} {
+		for _, eachParam := range group {
+			if eachParam.In == location {
+				params[eachParam.Name] = eachParam
+			}
+		}
+	}
+	return params
+}
+
+// getNameOnlyDiffNode builds a diff Node carrying just a field name,
+// without any type information.
+func getNameOnlyDiffNode(forLocation string) *Node {
+	return &Node{Field: forLocation}
+}
+
+// primitiveTypeString renders "name" or "name.format" when a format is set.
+func primitiveTypeString(typeName, typeFormat string) string {
+	if typeFormat == "" {
+		return typeName
+	}
+	return typeName + "." + typeFormat
+}
+
+// TypeDiff - describes a primitive type change
+type TypeDiff struct {
+	Change      SpecChangeCode `json:"change-type,omitempty"`
+	Description string         `json:"description,omitempty"`
+	FromType    string         `json:"from-type,omitempty"`
+	ToType      string         `json:"to-type,omitempty"`
+}
+
+// numberWideness ranks numeric type names by permissiveness; a change to a
+// higher rank widens the type, to a lower rank narrows it, and between equal
+// ranks is compatible (see getTypeHierarchyChange).
+// didn't use 'width' so as not to confuse with bit width
+var numberWideness = map[string]int{
+	"number":        3,
+	"number.double": 3,
+	"double":        3,
+	"number.float":  2,
+	"float":         2,
+	"long":          1,
+	"integer.int64": 1,
+	"integer":       0,
+	"integer.int32": 0,
+}
+
+func prettyprint(b []byte) (io.ReadWriter, error) {
+ var out bytes.Buffer
+ err := json.Indent(&out, b, "", " ")
+ return &out, err
+}
+
+// JSONMarshal allows the item to be correctly rendered to json
+func JSONMarshal(t interface{}) ([]byte, error) {
+ buffer := &bytes.Buffer{}
+ encoder := json.NewEncoder(buffer)
+ encoder.SetEscapeHTML(false)
+ err := encoder.Encode(t)
+ return buffer.Bytes(), err
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/schema.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/schema.go
new file mode 100644
index 000000000..0874154bb
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/schema.go
@@ -0,0 +1,126 @@
+package diff
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+func getTypeFromSchema(schema *spec.Schema) (typeName string, isArray bool) {
+ refStr := definitionFromRef(schema.Ref)
+ if len(refStr) > 0 {
+ return refStr, false
+ }
+ typeName = schema.Type[0]
+ if typeName == ArrayType {
+ typeName, _ = getSchemaType(&schema.Items.Schema.SchemaProps)
+ return typeName, true
+ }
+ return typeName, false
+
+}
+
+func getTypeFromSimpleSchema(schema *spec.SimpleSchema) (typeName string, isArray bool) {
+ typeName = schema.Type
+ format := schema.Format
+ if len(format) > 0 {
+ typeName = fmt.Sprintf("%s.%s", typeName, format)
+ }
+ if typeName == ArrayType {
+ typeName, _ = getSchemaType(&schema.Items.SimpleSchema)
+ return typeName, true
+ }
+ return typeName, false
+
+}
+
+func getTypeFromSchemaProps(schema *spec.SchemaProps) (typeName string, isArray bool) {
+ refStr := definitionFromRef(schema.Ref)
+ if len(refStr) > 0 {
+ return refStr, false
+ }
+ if len(schema.Type) > 0 {
+ typeName = schema.Type[0]
+ format := schema.Format
+ if len(format) > 0 {
+ typeName = fmt.Sprintf("%s.%s", typeName, format)
+ }
+ if typeName == ArrayType {
+ typeName, _ = getSchemaType(&schema.Items.Schema.SchemaProps)
+ return typeName, true
+ }
+ }
+ return typeName, false
+
+}
+
+func getSchemaTypeStr(item interface{}) string {
+ typeStr, isArray := getSchemaType(item)
+ return formatTypeString(typeStr, isArray)
+}
+
+func getSchemaType(item interface{}) (typeName string, isArray bool) {
+
+ switch s := item.(type) {
+ case *spec.Schema:
+ typeName, isArray = getTypeFromSchema(s)
+ case *spec.SchemaProps:
+ typeName, isArray = getTypeFromSchemaProps(s)
+ case spec.SchemaProps:
+ typeName, isArray = getTypeFromSchemaProps(&s)
+ case spec.SimpleSchema:
+ typeName, isArray = getTypeFromSimpleSchema(&s)
+ case *spec.SimpleSchema:
+ typeName, isArray = getTypeFromSimpleSchema(s)
+ default:
+ typeName = "unknown"
+ }
+
+ return
+
+}
+
+func formatTypeString(typ string, isarray bool) string {
+ if isarray {
+ return fmt.Sprintf("<array[%s]>", typ)
+ }
+ return fmt.Sprintf("<%s>", typ)
+}
+
+func definitionFromRef(ref spec.Ref) string {
+ url := ref.GetURL()
+ if url == nil {
+ return ""
+ }
+ fragmentParts := strings.Split(url.Fragment, "/")
+ numParts := len(fragmentParts)
+
+ return fragmentParts[numParts-1]
+}
+
+func isArray(item interface{}) bool {
+ switch s := item.(type) {
+ case *spec.Schema:
+ return isArrayType(s.Type)
+ case *spec.SchemaProps:
+ return isArrayType(s.Type)
+ case *spec.SimpleSchema:
+ return isArrayType(spec.StringOrArray{s.Type})
+ default:
+ return false
+ }
+}
+
+func isPrimitive(item interface{}) bool {
+ switch s := item.(type) {
+ case *spec.Schema:
+ return isPrimitiveType(s.Type)
+ case *spec.SchemaProps:
+ return isPrimitiveType(s.Type)
+ case spec.StringOrArray:
+ return isPrimitiveType(s)
+ default:
+ return false
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_analyser.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_analyser.go
new file mode 100644
index 000000000..8df44aeb2
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_analyser.go
@@ -0,0 +1,759 @@
+package diff
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+// StringType For identifying string types
+const StringType = "string"
+
+// URLMethodResponse encapsulates these three elements to act as a map key
+type URLMethodResponse struct {
+ Path string `json:"path"`
+ Method string `json:"method"`
+ Response string `json:"response"`
+}
+
+// MarshalText - for serializing as a map key
+func (p URLMethod) MarshalText() (text []byte, err error) {
+ return []byte(fmt.Sprintf("%s %s", p.Path, p.Method)), nil
+}
+
+// URLMethods allows iteration of endpoints based on url and method
+type URLMethods map[URLMethod]*PathItemOp
+
+// SpecAnalyser contains all the differences for a Spec
+type SpecAnalyser struct {
+ Diffs SpecDifferences
+ urlMethods1 URLMethods
+ urlMethods2 URLMethods
+ Definitions1 spec.Definitions
+ Definitions2 spec.Definitions
+ Info1 *spec.Info
+ Info2 *spec.Info
+ ReferencedDefinitions map[string]bool
+
+ schemasCompared map[string]struct{}
+}
+
+// NewSpecAnalyser returns an empty SpecDiffs
+func NewSpecAnalyser() *SpecAnalyser {
+ return &SpecAnalyser{
+ Diffs: SpecDifferences{},
+ ReferencedDefinitions: map[string]bool{},
+ }
+}
+
+// Analyse the differences in two specs
+func (sd *SpecAnalyser) Analyse(spec1, spec2 *spec.Swagger) error {
+ sd.schemasCompared = make(map[string]struct{})
+ sd.Definitions1 = spec1.Definitions
+ sd.Definitions2 = spec2.Definitions
+ sd.Info1 = spec1.Info
+ sd.Info2 = spec2.Info
+ sd.urlMethods1 = getURLMethodsFor(spec1)
+ sd.urlMethods2 = getURLMethodsFor(spec2)
+
+ sd.analyseSpecMetadata(spec1, spec2)
+ sd.analyseEndpoints()
+ sd.analyseRequestParams()
+ sd.analyseEndpointData()
+ sd.analyseResponseParams()
+ sd.analyseExtensions(spec1, spec2)
+ sd.AnalyseDefinitions()
+
+ return nil
+}
+
+func (sd *SpecAnalyser) analyseSpecMetadata(spec1, spec2 *spec.Swagger) {
+ // breaking if it no longer consumes any formats
+ added, deleted, _ := fromStringArray(spec1.Consumes).DiffsTo(spec2.Consumes)
+
+ node := getNameOnlyDiffNode("Spec")
+ location := DifferenceLocation{Node: node}
+ consumesLoation := location.AddNode(getNameOnlyDiffNode("consumes"))
+
+ for _, eachAdded := range added {
+ sd.Diffs = sd.Diffs.addDiff(
+ SpecDifference{DifferenceLocation: consumesLoation, Code: AddedConsumesFormat, Compatibility: NonBreaking, DiffInfo: eachAdded})
+ }
+ for _, eachDeleted := range deleted {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: consumesLoation, Code: DeletedConsumesFormat, Compatibility: Breaking, DiffInfo: eachDeleted})
+ }
+
+ // // breaking if it no longer produces any formats
+ added, deleted, _ = fromStringArray(spec1.Produces).DiffsTo(spec2.Produces)
+ producesLocation := location.AddNode(getNameOnlyDiffNode("produces"))
+ for _, eachAdded := range added {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: producesLocation, Code: AddedProducesFormat, Compatibility: NonBreaking, DiffInfo: eachAdded})
+ }
+ for _, eachDeleted := range deleted {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: producesLocation, Code: DeletedProducesFormat, Compatibility: Breaking, DiffInfo: eachDeleted})
+ }
+
+ // // breaking if it no longer supports a scheme
+ added, deleted, _ = fromStringArray(spec1.Schemes).DiffsTo(spec2.Schemes)
+ schemesLocation := location.AddNode(getNameOnlyDiffNode("schemes"))
+
+ for _, eachAdded := range added {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: schemesLocation, Code: AddedSchemes, Compatibility: NonBreaking, DiffInfo: eachAdded})
+ }
+ for _, eachDeleted := range deleted {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: schemesLocation, Code: DeletedSchemes, Compatibility: Breaking, DiffInfo: eachDeleted})
+ }
+
+ // host should be able to change without any issues?
+ sd.analyseMetaDataProperty(spec1.Info.Description, spec2.Info.Description, ChangedDescripton, NonBreaking)
+
+ // // host should be able to change without any issues?
+ sd.analyseMetaDataProperty(spec1.Host, spec2.Host, ChangedHostURL, Breaking)
+ // sd.Host = compareStrings(spec1.Host, spec2.Host)
+
+ // // Base Path change will break non generated clients
+ sd.analyseMetaDataProperty(spec1.BasePath, spec2.BasePath, ChangedBasePath, Breaking)
+
+ // TODO: what to do about security?
+ // Missing security scheme will break a client
+ // Security []map[string][]string `json:"security,omitempty"`
+ // Tags []Tag `json:"tags,omitempty"`
+ // ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"`
+}
+
+func (sd *SpecAnalyser) analyseEndpoints() {
+ sd.findDeletedEndpoints()
+ sd.findAddedEndpoints()
+}
+
+// AnalyseDefinitions check for changes to definition objects not referenced in any endpoint
+func (sd *SpecAnalyser) AnalyseDefinitions() {
+ alreadyReferenced := map[string]bool{}
+ for k := range sd.ReferencedDefinitions {
+ alreadyReferenced[k] = true
+ }
+ location := DifferenceLocation{Node: &Node{Field: "Spec Definitions"}}
+ for name1, sch := range sd.Definitions1 {
+ schema1 := sch
+ if _, ok := alreadyReferenced[name1]; !ok {
+ childLocation := location.AddNode(&Node{Field: name1})
+ if schema2, ok := sd.Definitions2[name1]; ok {
+ sd.compareSchema(childLocation, &schema1, &schema2)
+ } else {
+ sd.addDiffs(childLocation, []TypeDiff{{Change: DeletedDefinition}})
+ }
+ }
+ }
+ for name2 := range sd.Definitions2 {
+ if _, ok := sd.Definitions1[name2]; !ok {
+ childLocation := location.AddNode(&Node{Field: name2})
+ sd.addDiffs(childLocation, []TypeDiff{{Change: AddedDefinition}})
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyseEndpointData() {
+
+ for URLMethod, op2 := range sd.urlMethods2 {
+ if op1, ok := sd.urlMethods1[URLMethod]; ok {
+ addedTags, deletedTags, _ := fromStringArray(op1.Operation.Tags).DiffsTo(op2.Operation.Tags)
+ location := DifferenceLocation{URL: URLMethod.Path, Method: URLMethod.Method}
+
+ for _, eachAddedTag := range addedTags {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: AddedTag, DiffInfo: fmt.Sprintf(`"%s"`, eachAddedTag)})
+ }
+ for _, eachDeletedTag := range deletedTags {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: DeletedTag, DiffInfo: fmt.Sprintf(`"%s"`, eachDeletedTag)})
+ }
+
+ sd.compareDescripton(location, op1.Operation.Description, op2.Operation.Description)
+
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyseRequestParams() {
+ locations := []string{"query", "path", "body", "header", "formData"}
+
+ for _, paramLocation := range locations {
+ rootNode := getNameOnlyDiffNode(strings.Title(paramLocation))
+ for URLMethod, op2 := range sd.urlMethods2 {
+ if op1, ok := sd.urlMethods1[URLMethod]; ok {
+
+ params1 := getParams(op1.ParentPathItem.Parameters, op1.Operation.Parameters, paramLocation)
+ params2 := getParams(op2.ParentPathItem.Parameters, op2.Operation.Parameters, paramLocation)
+
+ location := DifferenceLocation{URL: URLMethod.Path, Method: URLMethod.Method, Node: rootNode}
+
+ // detect deleted params
+ for paramName1, param1 := range params1 {
+ if _, ok := params2[paramName1]; !ok {
+ childLocation := location.AddNode(getSchemaDiffNode(paramName1, &param1.SimpleSchema))
+ code := DeletedOptionalParam
+ if param1.Required {
+ code = DeletedRequiredParam
+ }
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: childLocation, Code: code})
+ }
+ }
+ // detect added changed params
+ for paramName2, param2 := range params2 {
+ // changed?
+ if param1, ok := params1[paramName2]; ok {
+ sd.compareParams(URLMethod, paramLocation, paramName2, param1, param2)
+ } else {
+ // Added
+ childLocation := location.AddNode(getSchemaDiffNode(paramName2, &param2.SimpleSchema))
+ code := AddedOptionalParam
+ if param2.Required {
+ code = AddedRequiredParam
+ }
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: childLocation, Code: code})
+ }
+ }
+ }
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyseResponseParams() {
+ // Loop through url+methods in spec 2 - check deleted and changed
+ for eachURLMethodFrom2, op2 := range sd.urlMethods2 {
+
+ // present in both specs? Use key from spec 2 to lookup in spec 1
+ if op1, ok := sd.urlMethods1[eachURLMethodFrom2]; ok {
+ // compare responses for url and method
+ op1Responses := op1.Operation.Responses.StatusCodeResponses
+ op2Responses := op2.Operation.Responses.StatusCodeResponses
+
+ // deleted responses
+ for code1 := range op1Responses {
+ if _, ok := op2Responses[code1]; !ok {
+ location := DifferenceLocation{URL: eachURLMethodFrom2.Path, Method: eachURLMethodFrom2.Method, Response: code1, Node: getSchemaDiffNode("Body", op1Responses[code1].Schema)}
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: DeletedResponse})
+ }
+ }
+ // Added updated Response Codes
+ for code2, op2Response := range op2Responses {
+ if op1Response, ok := op1Responses[code2]; ok {
+ op1Headers := op1Response.ResponseProps.Headers
+ headerRootNode := getNameOnlyDiffNode("Headers")
+
+ // Iterate Spec2 Headers looking for added and updated
+ location := DifferenceLocation{URL: eachURLMethodFrom2.Path, Method: eachURLMethodFrom2.Method, Response: code2, Node: headerRootNode}
+ for op2HeaderName, op2Header := range op2Response.ResponseProps.Headers {
+ if op1Header, ok := op1Headers[op2HeaderName]; ok {
+ diffs := sd.CompareProps(forHeader(op1Header), forHeader(op2Header))
+ sd.addDiffs(location, diffs)
+ } else {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{
+ DifferenceLocation: location.AddNode(getSchemaDiffNode(op2HeaderName, &op2Header.SimpleSchema)),
+ Code: AddedResponseHeader})
+ }
+ }
+ for op1HeaderName := range op1Response.ResponseProps.Headers {
+ if _, ok := op2Response.ResponseProps.Headers[op1HeaderName]; !ok {
+ op1Header := op1Response.ResponseProps.Headers[op1HeaderName]
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{
+ DifferenceLocation: location.AddNode(getSchemaDiffNode(op1HeaderName, &op1Header.SimpleSchema)),
+ Code: DeletedResponseHeader})
+ }
+ }
+ schem := op1Response.Schema
+ node := getNameOnlyDiffNode("NoContent")
+ if schem != nil {
+ node = getSchemaDiffNode("Body", &schem.SchemaProps)
+ }
+ responseLocation := DifferenceLocation{URL: eachURLMethodFrom2.Path,
+ Method: eachURLMethodFrom2.Method,
+ Response: code2,
+ Node: node}
+ sd.compareDescripton(responseLocation, op1Response.Description, op2Response.Description)
+
+ if op1Response.Schema != nil {
+ sd.compareSchema(
+ DifferenceLocation{URL: eachURLMethodFrom2.Path, Method: eachURLMethodFrom2.Method, Response: code2, Node: getSchemaDiffNode("Body", op1Response.Schema)},
+ op1Response.Schema,
+ op2Response.Schema)
+ }
+ } else {
+ // op2Response
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{
+ DifferenceLocation: DifferenceLocation{URL: eachURLMethodFrom2.Path, Method: eachURLMethodFrom2.Method, Response: code2, Node: getSchemaDiffNode("Body", op2Response.Schema)},
+ Code: AddedResponse})
+ }
+ }
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyseExtensions(spec1, spec2 *spec.Swagger) {
+ // root
+ specLoc := DifferenceLocation{Node: &Node{Field: "Spec"}}
+ sd.checkAddedExtensions(spec1.Extensions, spec2.Extensions, specLoc, "")
+ sd.checkDeletedExtensions(spec1.Extensions, spec2.Extensions, specLoc, "")
+
+ sd.analyzeInfoExtensions()
+ sd.analyzeTagExtensions(spec1, spec2)
+ sd.analyzeSecurityDefinitionExtensions(spec1, spec2)
+
+ sd.analyzeOperationExtensions()
+}
+
+func (sd *SpecAnalyser) analyzeOperationExtensions() {
+ for urlMethod, op2 := range sd.urlMethods2 {
+ pathAndMethodLoc := DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method}
+ if op1, ok := sd.urlMethods1[urlMethod]; ok {
+ sd.checkAddedExtensions(op1.Extensions, op2.Extensions, DifferenceLocation{URL: urlMethod.Path}, "")
+ sd.checkAddedExtensions(op1.Operation.Responses.Extensions, op2.Operation.Responses.Extensions, pathAndMethodLoc, "Responses")
+ sd.checkAddedExtensions(op1.Operation.Extensions, op2.Operation.Extensions, pathAndMethodLoc, "")
+
+ for code, resp := range op1.Operation.Responses.StatusCodeResponses {
+ for hdr, h := range resp.Headers {
+ op2StatusCode, ok := op2.Operation.Responses.StatusCodeResponses[code]
+ if ok {
+ if _, ok = op2StatusCode.Headers[hdr]; ok {
+ sd.checkAddedExtensions(h.Extensions, op2StatusCode.Headers[hdr].Extensions, DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method, Node: getNameOnlyDiffNode("Headers")}, hdr)
+ }
+ }
+ }
+
+ resp2 := op2.Operation.Responses.StatusCodeResponses[code]
+ sd.analyzeSchemaExtensions(resp.Schema, resp2.Schema, code, urlMethod)
+ }
+
+ }
+ }
+
+ for urlMethod, op1 := range sd.urlMethods1 {
+ pathAndMethodLoc := DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method}
+ if op2, ok := sd.urlMethods2[urlMethod]; ok {
+ sd.checkDeletedExtensions(op1.Extensions, op2.Extensions, DifferenceLocation{URL: urlMethod.Path}, "")
+ sd.checkDeletedExtensions(op1.Operation.Responses.Extensions, op2.Operation.Responses.Extensions, pathAndMethodLoc, "Responses")
+ sd.checkDeletedExtensions(op1.Operation.Extensions, op2.Operation.Extensions, pathAndMethodLoc, "")
+ for code, resp := range op1.Operation.Responses.StatusCodeResponses {
+ for hdr, h := range resp.Headers {
+ op2StatusCode, ok := op2.Operation.Responses.StatusCodeResponses[code]
+ if ok {
+ if _, ok = op2StatusCode.Headers[hdr]; ok {
+ sd.checkDeletedExtensions(h.Extensions, op2StatusCode.Headers[hdr].Extensions, DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method, Node: getNameOnlyDiffNode("Headers")}, hdr)
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyzeSecurityDefinitionExtensions(spec1 *spec.Swagger, spec2 *spec.Swagger) {
+ securityDefLoc := DifferenceLocation{Node: &Node{Field: "Security Definitions"}}
+ for key, securityDef := range spec1.SecurityDefinitions {
+ if securityDef2, ok := spec2.SecurityDefinitions[key]; ok {
+ sd.checkAddedExtensions(securityDef.Extensions, securityDef2.Extensions, securityDefLoc, "")
+ }
+ }
+
+ for key, securityDef := range spec2.SecurityDefinitions {
+ if securityDef1, ok := spec1.SecurityDefinitions[key]; ok {
+ sd.checkDeletedExtensions(securityDef1.Extensions, securityDef.Extensions, securityDefLoc, "")
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyzeSchemaExtensions(schema1, schema2 *spec.Schema, code int, urlMethod URLMethod) {
+ if schema1 != nil && schema2 != nil {
+ diffLoc := DifferenceLocation{Response: code, URL: urlMethod.Path, Method: urlMethod.Method, Node: getSchemaDiffNode("Body", schema2)}
+ sd.checkAddedExtensions(schema1.Extensions, schema2.Extensions, diffLoc, "")
+ sd.checkDeletedExtensions(schema1.Extensions, schema2.Extensions, diffLoc, "")
+ if schema1.Items != nil && schema2.Items != nil {
+ sd.analyzeSchemaExtensions(schema1.Items.Schema, schema2.Items.Schema, code, urlMethod)
+ for i := range schema1.Items.Schemas {
+ s1 := schema1.Items.Schemas[i]
+ for j := range schema2.Items.Schemas {
+ s2 := schema2.Items.Schemas[j]
+ sd.analyzeSchemaExtensions(&s1, &s2, code, urlMethod)
+ }
+ }
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyzeInfoExtensions() {
+ if sd.Info1 != nil && sd.Info2 != nil {
+ diffLocation := DifferenceLocation{Node: &Node{Field: "Spec Info"}}
+ sd.checkAddedExtensions(sd.Info1.Extensions, sd.Info2.Extensions, diffLocation, "")
+ sd.checkDeletedExtensions(sd.Info1.Extensions, sd.Info2.Extensions, diffLocation, "")
+ if sd.Info1.Contact != nil && sd.Info2.Contact != nil {
+ diffLocation = DifferenceLocation{Node: &Node{Field: "Spec Info.Contact"}}
+ sd.checkAddedExtensions(sd.Info1.Contact.Extensions, sd.Info2.Contact.Extensions, diffLocation, "")
+ sd.checkDeletedExtensions(sd.Info1.Contact.Extensions, sd.Info2.Contact.Extensions, diffLocation, "")
+ }
+ if sd.Info1.License != nil && sd.Info2.License != nil {
+ diffLocation = DifferenceLocation{Node: &Node{Field: "Spec Info.License"}}
+ sd.checkAddedExtensions(sd.Info1.License.Extensions, sd.Info2.License.Extensions, diffLocation, "")
+ sd.checkDeletedExtensions(sd.Info1.License.Extensions, sd.Info2.License.Extensions, diffLocation, "")
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyzeTagExtensions(spec1 *spec.Swagger, spec2 *spec.Swagger) {
+ diffLocation := DifferenceLocation{Node: &Node{Field: "Spec Tags"}}
+ for _, spec2Tag := range spec2.Tags {
+ for _, spec1Tag := range spec1.Tags {
+ if spec2Tag.Name == spec1Tag.Name {
+ sd.checkAddedExtensions(spec1Tag.Extensions, spec2Tag.Extensions, diffLocation, "")
+ }
+ }
+ }
+ for _, spec1Tag := range spec1.Tags {
+ for _, spec2Tag := range spec2.Tags {
+ if spec1Tag.Name == spec2Tag.Name {
+ sd.checkDeletedExtensions(spec1Tag.Extensions, spec2Tag.Extensions, diffLocation, "")
+ }
+ }
+ }
+}
+
+func (sd *SpecAnalyser) checkAddedExtensions(extensions1 spec.Extensions, extensions2 spec.Extensions, diffLocation DifferenceLocation, fieldPrefix string) {
+ for extKey := range extensions2 {
+ if _, ok := extensions1[extKey]; !ok {
+ if fieldPrefix != "" {
+ extKey = fmt.Sprintf("%s.%s", fieldPrefix, extKey)
+ }
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{
+ DifferenceLocation: diffLocation.AddNode(&Node{Field: extKey}),
+ Code: AddedExtension,
+ Compatibility: Warning, // this could potentially be a breaking change
+ })
+ }
+ }
+}
+
+func (sd *SpecAnalyser) checkDeletedExtensions(extensions1 spec.Extensions, extensions2 spec.Extensions, diffLocation DifferenceLocation, fieldPrefix string) {
+ for extKey := range extensions1 {
+ if _, ok := extensions2[extKey]; !ok {
+ if fieldPrefix != "" {
+ extKey = fmt.Sprintf("%s.%s", fieldPrefix, extKey)
+ }
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{
+ DifferenceLocation: diffLocation.AddNode(&Node{Field: extKey}),
+ Code: DeletedExtension,
+ Compatibility: Warning, // this could potentially be a breaking change
+ })
+ }
+ }
+}
+
+func addTypeDiff(diffs []TypeDiff, diff TypeDiff) []TypeDiff {
+ if diff.Change != NoChangeDetected {
+ diffs = append(diffs, diff)
+ }
+ return diffs
+}
+
+// CompareProps computes type specific property diffs
+func (sd *SpecAnalyser) CompareProps(type1, type2 *spec.SchemaProps) []TypeDiff {
+
+ diffs := []TypeDiff{}
+
+ diffs = CheckToFromPrimitiveType(diffs, type1, type2)
+
+ if len(diffs) > 0 {
+ return diffs
+ }
+
+ if isArray(type1) {
+ maxItemDiffs := CompareIntValues("MaxItems", type1.MaxItems, type2.MaxItems, WidenedType, NarrowedType)
+ diffs = append(diffs, maxItemDiffs...)
+ minItemsDiff := CompareIntValues("MinItems", type1.MinItems, type2.MinItems, NarrowedType, WidenedType)
+ diffs = append(diffs, minItemsDiff...)
+ }
+
+ if len(diffs) > 0 {
+ return diffs
+ }
+
+ diffs = CheckRefChange(diffs, type1, type2)
+ if len(diffs) > 0 {
+ return diffs
+ }
+
+ if !(isPrimitiveType(type1.Type) && isPrimitiveType(type2.Type)) {
+ return diffs
+ }
+
+ // check primitive type hierarchy change eg string -> integer = NarrowedChange
+ if type1.Type[0] != type2.Type[0] ||
+ type1.Format != type2.Format {
+ diff := getTypeHierarchyChange(primitiveTypeString(type1.Type[0], type1.Format), primitiveTypeString(type2.Type[0], type2.Format))
+ diffs = addTypeDiff(diffs, diff)
+ }
+
+ diffs = CheckStringTypeChanges(diffs, type1, type2)
+
+ if len(diffs) > 0 {
+ return diffs
+ }
+
+ diffs = checkNumericTypeChanges(diffs, type1, type2)
+
+ if len(diffs) > 0 {
+ return diffs
+ }
+
+ return diffs
+}
+
+func (sd *SpecAnalyser) compareParams(urlMethod URLMethod, location string, name string, param1, param2 spec.Parameter) {
+ diffLocation := DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method}
+
+ childLocation := diffLocation.AddNode(getNameOnlyDiffNode(strings.Title(location)))
+ paramLocation := diffLocation.AddNode(getNameOnlyDiffNode(name))
+ sd.compareDescripton(paramLocation, param1.Description, param2.Description)
+
+ if param1.Schema != nil && param2.Schema != nil {
+ if len(name) > 0 {
+ childLocation = childLocation.AddNode(getSchemaDiffNode(name, param2.Schema))
+ }
+ sd.compareSchema(childLocation, param1.Schema, param2.Schema)
+ }
+
+ diffs := sd.CompareProps(forParam(param1), forParam(param2))
+
+ childLocation = childLocation.AddNode(getSchemaDiffNode(name, &param2.SimpleSchema))
+ if len(diffs) > 0 {
+ sd.addDiffs(childLocation, diffs)
+ }
+
+ diffs = CheckToFromRequired(param1.Required, param2.Required)
+ if len(diffs) > 0 {
+ sd.addDiffs(childLocation, diffs)
+ }
+
+ sd.compareSimpleSchema(childLocation, &param1.SimpleSchema, &param2.SimpleSchema)
+}
+
+func (sd *SpecAnalyser) addTypeDiff(location DifferenceLocation, diff *TypeDiff) {
+ diffCopy := diff
+ desc := diffCopy.Description
+ if len(desc) == 0 {
+ if diffCopy.FromType != diffCopy.ToType {
+ desc = fmt.Sprintf("%s -> %s", diffCopy.FromType, diffCopy.ToType)
+ }
+ }
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{
+ DifferenceLocation: location,
+ Code: diffCopy.Change,
+ DiffInfo: desc})
+}
+
+func (sd *SpecAnalyser) compareDescripton(location DifferenceLocation, desc1, desc2 string) {
+ if desc1 != desc2 {
+ code := ChangedDescripton
+ if len(desc1) > 0 {
+ code = DeletedDescripton
+ } else if len(desc2) > 0 {
+ code = AddedDescripton
+ }
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: code})
+ }
+}
+
+func isPrimitiveType(item spec.StringOrArray) bool {
+ return len(item) > 0 && item[0] != ArrayType && item[0] != ObjectType
+}
+
+func isArrayType(item spec.StringOrArray) bool {
+ return len(item) > 0 && item[0] == ArrayType
+}
+func (sd *SpecAnalyser) getRefSchemaFromSpec1(ref spec.Ref) (*spec.Schema, string) {
+ return sd.schemaFromRef(ref, &sd.Definitions1)
+}
+
+func (sd *SpecAnalyser) getRefSchemaFromSpec2(ref spec.Ref) (*spec.Schema, string) {
+ return sd.schemaFromRef(ref, &sd.Definitions2)
+}
+
+// CompareSchemaFn Fn spec for comparing schemas
+type CompareSchemaFn func(location DifferenceLocation, schema1, schema2 *spec.Schema)
+
+func (sd *SpecAnalyser) compareSchema(location DifferenceLocation, schema1, schema2 *spec.Schema) {
+
+ refDiffs := []TypeDiff{}
+ refDiffs = CheckRefChange(refDiffs, schema1, schema2)
+ if len(refDiffs) > 0 {
+ for _, d := range refDiffs {
+ diff := d
+ sd.addTypeDiff(location, &diff)
+ }
+ return
+ }
+
+ if isRefType(schema1) {
+ key := schemaLocationKey(location)
+ if _, ok := sd.schemasCompared[key]; ok {
+ return
+ }
+ sd.schemasCompared[key] = struct{}{}
+ schema1, _ = sd.schemaFromRef(getRef(schema1), &sd.Definitions1)
+ }
+
+ if isRefType(schema2) {
+ schema2, _ = sd.schemaFromRef(getRef(schema2), &sd.Definitions2)
+ }
+
+ sd.compareDescripton(location, schema1.Description, schema2.Description)
+
+ typeDiffs := sd.CompareProps(&schema1.SchemaProps, &schema2.SchemaProps)
+ if len(typeDiffs) > 0 {
+ sd.addDiffs(location, typeDiffs)
+ return
+ }
+
+ if isArray(schema1) {
+ if isArray(schema2) {
+ sd.compareSchema(location, schema1.Items.Schema, schema2.Items.Schema)
+ } else {
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedType, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ }
+ }
+
+ diffs := CompareProperties(location, schema1, schema2, sd.getRefSchemaFromSpec1, sd.getRefSchemaFromSpec2, sd.compareSchema)
+ for _, diff := range diffs {
+ sd.Diffs = sd.Diffs.addDiff(diff)
+ }
+}
+
+func (sd *SpecAnalyser) compareSimpleSchema(location DifferenceLocation, schema1, schema2 *spec.SimpleSchema) {
+ // check optional/required
+ if schema1.Nullable != schema2.Nullable {
+ // If optional is made required
+ if schema1.Nullable && !schema2.Nullable {
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedOptionalToRequired, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ } else if !schema1.Nullable && schema2.Nullable {
+ // If required is made optional
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedRequiredToOptional, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ }
+ }
+
+ if schema1.CollectionFormat != schema2.CollectionFormat {
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedCollectionFormat, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ }
+
+ if schema1.Default != schema2.Default {
+ switch {
+ case schema1.Default == nil && schema2.Default != nil:
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: AddedDefault, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ case schema1.Default != nil && schema2.Default == nil:
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: DeletedDefault, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ default:
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedDefault, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ }
+ }
+
+ if schema1.Example != schema2.Example {
+ switch {
+ case schema1.Example == nil && schema2.Example != nil:
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: AddedExample, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ case schema1.Example != nil && schema2.Example == nil:
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: DeletedExample, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ default:
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedExample, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ }
+ }
+
+ if isArray(schema1) {
+ if isArray(schema2) {
+ sd.compareSimpleSchema(location, &schema1.Items.SimpleSchema, &schema2.Items.SimpleSchema)
+ } else {
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedType, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ }
+ }
+}
+
+func (sd *SpecAnalyser) addDiffs(location DifferenceLocation, diffs []TypeDiff) {
+ for _, e := range diffs {
+ eachTypeDiff := e
+ if eachTypeDiff.Change != NoChangeDetected {
+ sd.addTypeDiff(location, &eachTypeDiff)
+ }
+ }
+}
+
+func addChildDiffNode(location DifferenceLocation, propName string, propSchema *spec.Schema) DifferenceLocation {
+ newNode := location.Node
+ childNode := fromSchemaProps(propName, &propSchema.SchemaProps)
+ if newNode != nil {
+ newNode = newNode.Copy()
+ newNode.AddLeafNode(&childNode)
+ } else {
+ newNode = &childNode
+ }
+ return DifferenceLocation{
+ URL: location.URL,
+ Method: location.Method,
+ Response: location.Response,
+ Node: newNode,
+ }
+}
+
+func fromSchemaProps(fieldName string, props *spec.SchemaProps) Node {
+ node := Node{}
+ node.TypeName, node.IsArray = getSchemaType(props)
+ node.Field = fieldName
+ return node
+}
+
+func (sd *SpecAnalyser) findAddedEndpoints() {
+ for URLMethod := range sd.urlMethods2 {
+ if _, ok := sd.urlMethods1[URLMethod]; !ok {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: DifferenceLocation{URL: URLMethod.Path, Method: URLMethod.Method}, Code: AddedEndpoint})
+ }
+ }
+}
+
+func (sd *SpecAnalyser) findDeletedEndpoints() {
+ for eachURLMethod, operation1 := range sd.urlMethods1 {
+ code := DeletedEndpoint
+ if (operation1.ParentPathItem.Options != nil && operation1.ParentPathItem.Options.Deprecated) ||
+ (operation1.Operation.Deprecated) {
+ code = DeletedDeprecatedEndpoint
+ }
+ if _, ok := sd.urlMethods2[eachURLMethod]; !ok {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: DifferenceLocation{URL: eachURLMethod.Path, Method: eachURLMethod.Method}, Code: code})
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyseMetaDataProperty(item1, item2 string, codeIfDiff SpecChangeCode, compatIfDiff Compatibility) {
+ if item1 != item2 {
+ diffSpec := fmt.Sprintf("%s -> %s", item1, item2)
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: DifferenceLocation{Node: &Node{Field: "Spec Metadata"}}, Code: codeIfDiff, Compatibility: compatIfDiff, DiffInfo: diffSpec})
+ }
+}
+
+func (sd *SpecAnalyser) schemaFromRef(ref spec.Ref, defns *spec.Definitions) (actualSchema *spec.Schema, definitionName string) {
+ definitionName = definitionFromRef(ref)
+ foundSchema, ok := (*defns)[definitionName]
+ if !ok {
+ return nil, definitionName
+ }
+ sd.ReferencedDefinitions[definitionName] = true
+ actualSchema = &foundSchema
+ return
+
+}
+
+func schemaLocationKey(location DifferenceLocation) string {
+ return location.Method + location.URL + location.Node.Field + location.Node.TypeName
+}
+
+// PropertyDefn combines a property with its required-ness
+type PropertyDefn struct {
+ Schema *spec.Schema
+ Required bool
+}
+
+// PropertyMap a unified map including all AllOf fields
+type PropertyMap map[string]PropertyDefn
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_difference.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_difference.go
new file mode 100644
index 000000000..73e38ce4e
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_difference.go
@@ -0,0 +1,216 @@
+package diff
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "sort"
+ "strings"
+)
+
// SpecDifference encapsulates the details of an individual diff in part of a
// spec: where it occurred, what kind of change it is, how compatible the
// change is, and optional free-form detail text.
type SpecDifference struct {
	DifferenceLocation DifferenceLocation `json:"location"`
	Code               SpecChangeCode     `json:"code"`
	Compatibility      Compatibility      `json:"compatibility"`
	DiffInfo           string             `json:"info,omitempty"`
}

// SpecDifferences is a list of differences between two specs.
type SpecDifferences []SpecDifference
+
+// Matches returns true if the diff matches another
+func (sd SpecDifference) Matches(other SpecDifference) bool {
+ return sd.Code == other.Code &&
+ sd.Compatibility == other.Compatibility &&
+ sd.DiffInfo == other.DiffInfo &&
+ equalLocations(sd.DifferenceLocation, other.DifferenceLocation)
+}
+
+func equalLocations(a, b DifferenceLocation) bool {
+ return a.Method == b.Method &&
+ a.Response == b.Response &&
+ a.URL == b.URL &&
+ equalNodes(a.Node, b.Node)
+}
+
+func equalNodes(a, b *Node) bool {
+ if a == nil && b == nil {
+ return true
+ }
+ if a == nil || b == nil {
+ return false
+ }
+ return a.Field == b.Field &&
+ a.IsArray == b.IsArray &&
+ a.TypeName == b.TypeName &&
+ equalNodes(a.ChildNode, b.ChildNode)
+
+}
+
+// BreakingChangeCount Calculates the breaking change count
+func (sd SpecDifferences) BreakingChangeCount() int {
+ count := 0
+ for _, eachDiff := range sd {
+ if eachDiff.Compatibility == Breaking {
+ count++
+ }
+ }
+ return count
+}
+
+// WarningChangeCount Calculates the warning change count
+func (sd SpecDifferences) WarningChangeCount() int {
+ count := 0
+ for _, eachDiff := range sd {
+ if eachDiff.Compatibility == Warning {
+ count++
+ }
+ }
+ return count
+}
+
+// FilterIgnores returns a copy of the list without the items in the specified ignore list
+func (sd SpecDifferences) FilterIgnores(ignores SpecDifferences) SpecDifferences {
+ newDiffs := SpecDifferences{}
+ for _, eachDiff := range sd {
+ if !ignores.Contains(eachDiff) {
+ newDiffs = newDiffs.addDiff(eachDiff)
+ }
+ }
+ return newDiffs
+}
+
+// Contains Returns true if the item contains the specified item
+func (sd SpecDifferences) Contains(diff SpecDifference) bool {
+ for _, eachDiff := range sd {
+ if eachDiff.Matches(diff) {
+ return true
+ }
+ }
+ return false
+}
+
+// String std string renderer
+func (sd SpecDifference) String() string {
+ isResponse := sd.DifferenceLocation.Response > 0
+ hasMethod := len(sd.DifferenceLocation.Method) > 0
+ hasURL := len(sd.DifferenceLocation.URL) > 0
+
+ prefix := ""
+ direction := ""
+
+ if hasMethod {
+ if hasURL {
+ prefix = fmt.Sprintf("%s:%s", sd.DifferenceLocation.URL, sd.DifferenceLocation.Method)
+ }
+ if isResponse {
+ prefix += fmt.Sprintf(" -> %d", sd.DifferenceLocation.Response)
+ direction = "Response"
+ } else {
+ direction = "Request"
+ }
+ } else {
+ prefix = sd.DifferenceLocation.URL
+ }
+
+ paramOrPropertyLocation := ""
+ if sd.DifferenceLocation.Node != nil {
+ paramOrPropertyLocation = sd.DifferenceLocation.Node.String()
+ }
+ optionalInfo := ""
+ if sd.DiffInfo != "" {
+ optionalInfo = sd.DiffInfo
+ }
+
+ items := []string{}
+ for _, item := range []string{prefix, direction, paramOrPropertyLocation, sd.Code.Description(), optionalInfo} {
+ if item != "" {
+ items = append(items, item)
+ }
+ }
+ return strings.Join(items, " - ")
+ // return fmt.Sprintf("%s%s%s - %s%s", prefix, direction, paramOrPropertyLocation, sd.Code.Description(), optionalInfo)
+}
+
+func (sd SpecDifferences) addDiff(diff SpecDifference) SpecDifferences {
+ context := Request
+ if diff.DifferenceLocation.Response > 0 {
+ context = Response
+ }
+ diff.Compatibility = getCompatibilityForChange(diff.Code, context)
+
+ return append(sd, diff)
+}
+
+// ReportCompatibility lists and spec
+func (sd *SpecDifferences) ReportCompatibility() (io.Reader, error, error) {
+ var out bytes.Buffer
+ breakingCount := sd.BreakingChangeCount()
+ if breakingCount > 0 {
+ if len(*sd) != breakingCount {
+ fmt.Fprintln(&out, "")
+ }
+ fmt.Fprintln(&out, "BREAKING CHANGES:\n=================")
+ _, _ = out.ReadFrom(sd.reportChanges(Breaking))
+ msg := fmt.Sprintf("compatibility test FAILED: %d breaking changes detected", breakingCount)
+ fmt.Fprintln(&out, msg)
+ return &out, nil, errors.New(msg)
+ }
+ fmt.Fprintf(&out, "compatibility test OK. No breaking changes identified.\n")
+ return &out, nil, nil
+}
+
+func (sd SpecDifferences) reportChanges(compat Compatibility) io.Reader {
+ toReportList := []string{}
+ var out bytes.Buffer
+
+ for _, diff := range sd {
+ if diff.Compatibility == compat {
+ toReportList = append(toReportList, diff.String())
+ }
+ }
+
+ sort.Slice(toReportList, func(i, j int) bool {
+ return toReportList[i] < toReportList[j]
+ })
+
+ for _, eachDiff := range toReportList {
+ fmt.Fprintln(&out, eachDiff)
+ }
+ return &out
+}
+
+// ReportAllDiffs lists all the diffs between two specs
+func (sd SpecDifferences) ReportAllDiffs(fmtJSON bool) (io.Reader, error, error) {
+ if fmtJSON {
+ b, err := JSONMarshal(sd)
+ if err != nil {
+ return nil, fmt.Errorf("couldn't print results: %v", err), nil
+ }
+ out, err := prettyprint(b)
+ return out, err, nil
+ }
+ numDiffs := len(sd)
+ if numDiffs == 0 {
+ return bytes.NewBuffer([]byte("No changes identified\n")), nil, nil
+ }
+
+ var out bytes.Buffer
+ if numDiffs != sd.BreakingChangeCount() {
+ fmt.Fprintln(&out, "NON-BREAKING CHANGES:\n=====================")
+ _, _ = out.ReadFrom(sd.reportChanges(NonBreaking))
+ if sd.WarningChangeCount() > 0 {
+ fmt.Fprintln(&out, "\nNON-BREAKING CHANGES WITH WARNING:\n==================================")
+ _, _ = out.ReadFrom(sd.reportChanges(Warning))
+ }
+ }
+
+ more, err, warn := sd.ReportCompatibility()
+ if err != nil {
+ return nil, err, warn
+ }
+ _, _ = out.ReadFrom(more)
+ return &out, nil, warn
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/type_adapters.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/type_adapters.go
new file mode 100644
index 000000000..5679367fd
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/type_adapters.go
@@ -0,0 +1,163 @@
+package diff
+
+import (
+ "github.com/go-openapi/spec"
+)
+
+func forItems(items *spec.Items) *spec.Schema {
+ if items == nil {
+ return nil
+ }
+ valids := items.CommonValidations
+ schema := spec.Schema{
+ SchemaProps: spec.SchemaProps{
+ Type: []string{items.SimpleSchema.Type},
+ Format: items.SimpleSchema.Format,
+ Maximum: valids.Maximum,
+ ExclusiveMaximum: valids.ExclusiveMaximum,
+ Minimum: valids.Minimum,
+ ExclusiveMinimum: valids.ExclusiveMinimum,
+ MaxLength: valids.MaxLength,
+ MinLength: valids.MinLength,
+ Pattern: valids.Pattern,
+ MaxItems: valids.MaxItems,
+ MinItems: valids.MinItems,
+ UniqueItems: valids.UniqueItems,
+ MultipleOf: valids.MultipleOf,
+ Enum: valids.Enum,
+ },
+ }
+ return &schema
+}
+
+func forHeader(header spec.Header) *spec.SchemaProps {
+ return &spec.SchemaProps{
+ Type: []string{header.Type},
+ Format: header.Format,
+ Items: &spec.SchemaOrArray{Schema: forItems(header.Items)},
+ Maximum: header.Maximum,
+ ExclusiveMaximum: header.ExclusiveMaximum,
+ Minimum: header.Minimum,
+ ExclusiveMinimum: header.ExclusiveMinimum,
+ MaxLength: header.MaxLength,
+ MinLength: header.MinLength,
+ Pattern: header.Pattern,
+ MaxItems: header.MaxItems,
+ MinItems: header.MinItems,
+ UniqueItems: header.UniqueItems,
+ MultipleOf: header.MultipleOf,
+ Enum: header.Enum,
+ }
+}
+
+func forParam(param spec.Parameter) *spec.SchemaProps {
+ return &spec.SchemaProps{
+ Type: []string{param.Type},
+ Format: param.Format,
+ Items: &spec.SchemaOrArray{Schema: forItems(param.Items)},
+ Maximum: param.Maximum,
+ ExclusiveMaximum: param.ExclusiveMaximum,
+ Minimum: param.Minimum,
+ ExclusiveMinimum: param.ExclusiveMinimum,
+ MaxLength: param.MaxLength,
+ MinLength: param.MinLength,
+ Pattern: param.Pattern,
+ MaxItems: param.MaxItems,
+ MinItems: param.MinItems,
+ UniqueItems: param.UniqueItems,
+ MultipleOf: param.MultipleOf,
+ Enum: param.Enum,
+ }
+}
+
// OperationMap indexes the operations of a PathItem individually, keyed by
// lowercase HTTP method name, so they can be iterated generically.
type OperationMap map[string]*spec.Operation
+
+func toMap(item *spec.PathItem) OperationMap {
+ m := make(OperationMap)
+
+ if item.Post != nil {
+ m["post"] = item.Post
+ }
+ if item.Get != nil {
+ m["get"] = item.Get
+ }
+ if item.Put != nil {
+ m["put"] = item.Put
+ }
+ if item.Patch != nil {
+ m["patch"] = item.Patch
+ }
+ if item.Head != nil {
+ m["head"] = item.Head
+ }
+ if item.Options != nil {
+ m["options"] = item.Options
+ }
+ if item.Delete != nil {
+ m["delete"] = item.Delete
+ }
+ return m
+}
+
+func getURLMethodsFor(spec *spec.Swagger) URLMethods {
+ returnURLMethods := URLMethods{}
+
+ for url, eachPath := range spec.Paths.Paths {
+ eachPath := eachPath
+ opsMap := toMap(&eachPath)
+ for method, op := range opsMap {
+ returnURLMethods[URLMethod{url, method}] = &PathItemOp{&eachPath, op, eachPath.Extensions}
+ }
+ }
+ return returnURLMethods
+}
+
// isStringType reports whether a simple swagger type name is textual, i.e.
// the "string" type or its "password" variant.
func isStringType(typeName string) bool {
	switch typeName {
	case "string", "password":
		return true
	default:
		return false
	}
}
+
// SchemaFromRefFn resolves a $ref into its schema and definition name.
// Implementations return a nil schema when the reference cannot be resolved.
type SchemaFromRefFn func(spec.Ref) (*spec.Schema, string)
+
+func propertiesFor(schema *spec.Schema, getRefFn SchemaFromRefFn) PropertyMap {
+ if isRefType(schema) {
+ schema, _ = getRefFn(schema.Ref)
+ }
+ props := PropertyMap{}
+
+ requiredProps := schema.Required
+ requiredMap := map[string]bool{}
+ for _, prop := range requiredProps {
+ requiredMap[prop] = true
+ }
+
+ if schema.Properties != nil {
+ for name, prop := range schema.Properties {
+ prop := prop
+ required := requiredMap[name]
+ props[name] = PropertyDefn{Schema: &prop, Required: required}
+ }
+ }
+ for _, e := range schema.AllOf {
+ eachAllOf := e
+ allOfMap := propertiesFor(&eachAllOf, getRefFn)
+ for name, prop := range allOfMap {
+ props[name] = prop
+ }
+ }
+ return props
+}
+
+func getRef(item interface{}) spec.Ref {
+ switch s := item.(type) {
+ case *spec.Refable:
+ return s.Ref
+ case *spec.Schema:
+ return s.Ref
+ case *spec.SchemaProps:
+ return s.Ref
+ default:
+ return spec.Ref{}
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/expand.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/expand.go
new file mode 100644
index 000000000..d8a704673
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/expand.go
@@ -0,0 +1,81 @@
+package commands
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log"
+ "os"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+ flags "github.com/jessevdk/go-flags"
+)
+
// ExpandSpec is a command that expands the $refs in a swagger document.
//
// There are no specific options for this expansion; output format and
// destination are controlled by the flags below.
type ExpandSpec struct {
	Compact bool           `long:"compact" description:"applies to JSON formatted specs. When present, doesn't prettify the json"`
	Output  flags.Filename `long:"output" short:"o" description:"the file to write to"`
	Format  string         `long:"format" description:"the format for the spec document" default:"json" choice:"yaml" choice:"json"`
}
+
+// Execute expands the spec
+func (c *ExpandSpec) Execute(args []string) error {
+ if len(args) != 1 {
+ return errors.New("expand command requires the single swagger document url to be specified")
+ }
+
+ swaggerDoc := args[0]
+ specDoc, err := loads.Spec(swaggerDoc)
+ if err != nil {
+ return err
+ }
+
+ exp, err := specDoc.Expanded()
+ if err != nil {
+ return err
+ }
+
+ return writeToFile(exp.Spec(), !c.Compact, c.Format, string(c.Output))
+}
+
+func writeToFile(swspec *spec.Swagger, pretty bool, format string, output string) error {
+ var b []byte
+ var err error
+ asJSON := format == "json"
+
+ log.Println("format = ", format)
+ switch {
+ case pretty && asJSON:
+ b, err = json.MarshalIndent(swspec, "", " ")
+ case asJSON:
+ b, err = json.Marshal(swspec)
+ default:
+ // marshals as YAML
+ b, err = json.Marshal(swspec)
+ if err == nil {
+ var data swag.JSONMapSlice
+ if erg := json.Unmarshal(b, &data); erg != nil {
+ log.Fatalln(erg)
+ }
+ var bb interface{}
+ bb, err = data.MarshalYAML()
+ b = bb.([]byte)
+ }
+
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if output == "" {
+ fmt.Println(string(b))
+ return nil
+ }
+
+ return os.WriteFile(output, b, 0644) // #nosec
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/flatten.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/flatten.go
new file mode 100644
index 000000000..b30b50fd5
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/flatten.go
@@ -0,0 +1,48 @@
+package commands
+
+import (
+ "errors"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/loads"
+ "github.com/go-swagger/go-swagger/cmd/swagger/commands/generate"
+ flags "github.com/jessevdk/go-flags"
+)
+
// FlattenSpec is a command that flattens a swagger document:
// it expands the remote references in a spec and moves inline schemas to
// definitions, so that after flattening there are no complex inlined schemas
// anymore.
type FlattenSpec struct {
	Compact bool           `long:"compact" description:"applies to JSON formatted specs. When present, doesn't prettify the json"`
	Output  flags.Filename `long:"output" short:"o" description:"the file to write to"`
	Format  string         `long:"format" description:"the format for the spec document" default:"json" choice:"yaml" choice:"json"`
	generate.FlattenCmdOptions
}
+
+// Execute flattens the spec
+func (c *FlattenSpec) Execute(args []string) error {
+ if len(args) != 1 {
+ return errors.New("flatten command requires the single swagger document url to be specified")
+ }
+
+ swaggerDoc := args[0]
+ specDoc, err := loads.Spec(swaggerDoc)
+ if err != nil {
+ return err
+ }
+
+ flattenOpts := c.FlattenCmdOptions.SetFlattenOptions(&analysis.FlattenOpts{
+ // defaults
+ Minimal: true,
+ Verbose: true,
+ Expand: false,
+ RemoveUnused: false,
+ })
+ flattenOpts.BasePath = specDoc.SpecFilePath()
+ flattenOpts.Spec = analysis.New(specDoc.Spec())
+ if err := analysis.Flatten(*flattenOpts); err != nil {
+ return err
+ }
+
+ return writeToFile(specDoc.Spec(), !c.Compact, c.Format, string(c.Output))
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate.go
new file mode 100644
index 000000000..5f4b8598f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate.go
@@ -0,0 +1,29 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import "github.com/go-swagger/go-swagger/cmd/swagger/commands/generate"
+
// Generate command to group all generator sub-commands together under
// "swagger generate".
type Generate struct {
	Model     *generate.Model     `command:"model"`
	Operation *generate.Operation `command:"operation"`
	Support   *generate.Support   `command:"support"`
	Server    *generate.Server    `command:"server"`
	Spec      *generate.SpecFile  `command:"spec"`
	Client    *generate.Client    `command:"client"`
	Cli       *generate.Cli       `command:"cli"`
	Markdown  *generate.Markdown  `command:"markdown"`
}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/cli.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/cli.go
new file mode 100644
index 000000000..e8ea11c79
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/cli.go
@@ -0,0 +1,26 @@
+package generate
+
+import "github.com/go-swagger/go-swagger/generator"
+
// Cli is the command to generate a CLI application from a swagger spec.
type Cli struct {
	// generate a cli includes all client code
	Client
	// cmd/<cli-app-name>/main.go will be generated. This ensures that go install will compile the app with desired name.
	CliAppName string `long:"cli-app-name" description:"the app name for the cli executable. useful for go install." default:"cli"`
}
+
+func (c Cli) apply(opts *generator.GenOpts) {
+ c.Client.apply(opts)
+ opts.IncludeCLi = true
+ opts.CliPackage = "cli" // hardcoded for now, can be exposed via cmd opt later
+ opts.CliAppName = c.CliAppName
+}
+
// generate delegates code generation to the embedded Client command; the
// CLI-specific options have already been set by apply.
func (c *Cli) generate(opts *generator.GenOpts) error {
	return c.Client.generate(opts)
}
+
// Execute runs this command through the shared generation pipeline.
func (c *Cli) Execute(args []string) error {
	return createSwagger(c)
}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/client.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/client.go
new file mode 100644
index 000000000..3a78b5622
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/client.go
@@ -0,0 +1,86 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "log"
+
+ "github.com/go-swagger/go-swagger/generator"
+)
+
// clientOptions holds the CLI flags specific to client code generation.
type clientOptions struct {
	ClientPackage string `long:"client-package" short:"c" description:"the package to save the client specific code" default:"client"`
}

// apply copies the client package option onto the generator options.
func (co clientOptions) apply(opts *generator.GenOpts) {
	opts.ClientPackage = co.ClientPackage
}
+
// Client the command to generate a swagger client.
//
// It composes the shared, model, operation, client, scheme and media option
// groups and adds switches to skip model or operation generation.
type Client struct {
	WithShared
	WithModels
	WithOperations

	clientOptions
	schemeOptions
	mediaOptions

	SkipModels     bool `long:"skip-models" description:"no models will be generated when this flag is specified"`
	SkipOperations bool `long:"skip-operations" description:"no operations will be generated when this flag is specified"`

	Name string `long:"name" short:"A" description:"the name of the application, defaults to a mangled value of info.title"`
}
+
+func (c Client) apply(opts *generator.GenOpts) {
+ c.Shared.apply(opts)
+ c.Models.apply(opts)
+ c.Operations.apply(opts)
+ c.clientOptions.apply(opts)
+ c.schemeOptions.apply(opts)
+ c.mediaOptions.apply(opts)
+
+ opts.IncludeModel = !c.SkipModels
+ opts.IncludeValidator = !c.SkipModels
+ opts.IncludeHandler = !c.SkipOperations
+ opts.IncludeParameters = !c.SkipOperations
+ opts.IncludeResponses = !c.SkipOperations
+ opts.Name = c.Name
+
+ opts.IsClient = true
+ opts.IncludeSupport = true
+}
+
// generate produces the client code for the selected models and operations.
func (c *Client) generate(opts *generator.GenOpts) error {
	return generator.GenerateClient(c.Name, c.Models.Models, c.Operations.Operations, opts)
}
+
// log prints post-generation instructions. rp (the target path) is unused
// here but kept to satisfy the common command interface.
func (c *Client) log(rp string) {
	log.Println(`Generation completed!

For this generation to compile you need to have some packages in your go.mod:

  * github.com/go-openapi/errors
  * github.com/go-openapi/runtime
  * github.com/go-openapi/runtime/client
  * github.com/go-openapi/strfmt

You can get these now with: go mod tidy`)
}
+
// Execute runs this command through the shared generation pipeline.
func (c *Client) Execute(args []string) error {
	return createSwagger(c)
}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/contrib.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/contrib.go
new file mode 100644
index 000000000..196558e70
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/contrib.go
@@ -0,0 +1,17 @@
+package generate
+
+import (
+ "github.com/go-swagger/go-swagger/generator"
+)
+
+// contribOptionsOverride gives contributed templates the ability to override the options if they need
+func contribOptionsOverride(opts *generator.GenOpts) {
+ // nolint: gocritic
+ switch opts.Template {
+ case "stratoscale":
+ // Stratoscale template needs to regenerate the configureapi on every run.
+ opts.RegenerateConfigureAPI = true
+ // It also does not use the main.go
+ opts.IncludeMain = false
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/markdown.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/markdown.go
new file mode 100644
index 000000000..ba9df3812
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/markdown.go
@@ -0,0 +1,33 @@
+package generate
+
+import (
+ "github.com/go-swagger/go-swagger/generator"
+ "github.com/jessevdk/go-flags"
+)
+
// Markdown generates a markdown representation of the spec.
type Markdown struct {
	WithShared
	WithModels
	WithOperations

	// Output is the destination file for the generated markdown.
	Output flags.Filename `long:"output" short:"" description:"the file to write the generated markdown." default:"markdown.md"`
}
+
+func (m Markdown) apply(opts *generator.GenOpts) {
+ m.Shared.apply(opts)
+ m.Models.apply(opts)
+ m.Operations.apply(opts)
+}
+
// generate renders the markdown document for the selected models and
// operations into the configured output file.
func (m *Markdown) generate(opts *generator.GenOpts) error {
	return generator.GenerateMarkdown(string(m.Output), m.Models.Models, m.Operations.Operations, opts)
}
+
// log intentionally prints nothing: markdown generation has no
// post-generation instructions. rp is kept to satisfy the command interface.
func (m Markdown) log(rp string) {
}
+
// Execute runs this command through the shared generation pipeline.
func (m *Markdown) Execute(args []string) error {
	return createSwagger(m)
}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/model.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/model.go
new file mode 100644
index 000000000..fb8c14268
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/model.go
@@ -0,0 +1,98 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "errors"
+ "log"
+
+ "github.com/go-swagger/go-swagger/generator"
+)
+
// modelOptions holds the CLI flags that control model generation.
type modelOptions struct {
	ModelPackage               string   `long:"model-package" short:"m" description:"the package to save the models" default:"models"`
	Models                     []string `long:"model" short:"M" description:"specify a model to include in generation, repeat for multiple (defaults to all)"`
	ExistingModels             string   `long:"existing-models" description:"use pre-generated models e.g. github.com/foobar/model"`
	StrictAdditionalProperties bool     `long:"strict-additional-properties" description:"disallow extra properties when additionalProperties is set to false"`
	KeepSpecOrder              bool     `long:"keep-spec-order" description:"keep schema properties order identical to spec file"`
	AllDefinitions             bool     `long:"all-definitions" description:"generate all model definitions regardless of usage in operations" hidden:"deprecated"`
	StructTags                 []string `long:"struct-tags" description:"the struct tags to generate, repeat for multiple (defaults to json)"`
}
+
+func (mo modelOptions) apply(opts *generator.GenOpts) {
+ opts.ModelPackage = mo.ModelPackage
+ opts.Models = mo.Models
+ opts.ExistingModels = mo.ExistingModels
+ opts.StrictAdditionalProperties = mo.StrictAdditionalProperties
+ opts.PropertiesSpecOrder = mo.KeepSpecOrder
+ opts.IgnoreOperations = mo.AllDefinitions
+ opts.StructTags = mo.StructTags
+}
+
// WithModels adds the model options group.
//
// This group is available to all commands that need some model generation.
type WithModels struct {
	Models modelOptions `group:"Options for model generation"`
}
+
// Model the generate model file command.
//
// Define the options that are specific to the "swagger generate model"
// command.
type Model struct {
	WithShared
	WithModels

	NoStruct              bool     `long:"skip-struct" description:"when present will not generate the model struct" hidden:"deprecated"`
	Name                  []string `long:"name" short:"n" description:"the model to generate, repeat for multiple (defaults to all). Same as --models"`
	AcceptDefinitionsOnly bool     `long:"accept-definitions-only" description:"accepts a partial swagger spec with only the definitions key"`
}
+
+func (m Model) apply(opts *generator.GenOpts) {
+ m.Shared.apply(opts)
+ m.Models.apply(opts)
+
+ opts.IncludeModel = !m.NoStruct
+ opts.IncludeValidator = !m.NoStruct
+ opts.AcceptDefinitionsOnly = m.AcceptDefinitionsOnly
+}
+
// log prints post-generation instructions. rp (the target path) is unused
// here but kept to satisfy the common command interface.
func (m Model) log(rp string) {
	log.Println(`Generation completed!

For this generation to compile you need to have some packages in your go.mod:

  * github.com/go-openapi/validate
  * github.com/go-openapi/strfmt

You can get these now with: go mod tidy`)
}
+
// generate produces the models selected via --name plus those selected via
// the shared --model flag.
func (m *Model) generate(opts *generator.GenOpts) error {
	return generator.GenerateModels(append(m.Name, m.Models.Models...), opts)
}
+
+// Execute generates a model file
+func (m *Model) Execute(args []string) error {
+
+ if m.Shared.DumpData && len(append(m.Name, m.Models.Models...)) > 1 {
+ return errors.New("only 1 model at a time is supported for dumping data")
+ }
+
+ if m.Models.ExistingModels != "" {
+ log.Println("warning: Ignoring existing-models flag when generating models.")
+ }
+ return createSwagger(m)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/operation.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/operation.go
new file mode 100644
index 000000000..ba554314a
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/operation.go
@@ -0,0 +1,104 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "errors"
+ "log"
+
+ "github.com/go-swagger/go-swagger/generator"
+)
+
// operationOptions holds the CLI flags that control operation generation.
type operationOptions struct {
	Operations []string `long:"operation" short:"O" description:"specify an operation to include, repeat for multiple (defaults to all)"`
	Tags       []string `long:"tags" description:"the tags to include, if not specified defaults to all" group:"operations"`
	APIPackage string   `long:"api-package" short:"a" description:"the package to save the operations" default:"operations"`
	WithEnumCI bool     `long:"with-enum-ci" description:"allow case-insensitive enumerations"`

	// tags handling
	SkipTagPackages bool `long:"skip-tag-packages" description:"skips the generation of tag-based operation packages, resulting in a flat generation"`
}
+
+func (oo operationOptions) apply(opts *generator.GenOpts) {
+ opts.Operations = oo.Operations
+ opts.Tags = oo.Tags
+ opts.APIPackage = oo.APIPackage
+ opts.AllowEnumCI = oo.WithEnumCI
+ opts.SkipTagPackages = oo.SkipTagPackages
+}
+
// WithOperations adds the operations options group.
//
// This group is available to all commands that generate operations.
type WithOperations struct {
	Operations operationOptions `group:"Options for operation generation"`
}
+
// Operation the generate operation files command.
//
// It composes the shared and operation option groups plus the client,
// server, scheme and media options, and adds switches to skip parts of the
// generated output.
type Operation struct {
	WithShared
	WithOperations

	clientOptions
	serverOptions
	schemeOptions
	mediaOptions

	ModelPackage string `long:"model-package" short:"m" description:"the package to save the models" default:"models"`

	NoHandler    bool `long:"skip-handler" description:"when present will not generate an operation handler"`
	NoStruct     bool `long:"skip-parameters" description:"when present will not generate the parameter model struct"`
	NoResponses  bool `long:"skip-responses" description:"when present will not generate the response model struct"`
	NoURLBuilder bool `long:"skip-url-builder" description:"when present will not generate a URL builder"`

	Name []string `long:"name" short:"n" description:"the operations to generate, repeat for multiple (defaults to all). Same as --operations"`
}
+
+func (o Operation) apply(opts *generator.GenOpts) {
+ o.Shared.apply(opts)
+ o.Operations.apply(opts)
+ o.clientOptions.apply(opts)
+ o.serverOptions.apply(opts)
+ o.schemeOptions.apply(opts)
+ o.mediaOptions.apply(opts)
+
+ opts.ModelPackage = o.ModelPackage
+ opts.IncludeHandler = !o.NoHandler
+ opts.IncludeResponses = !o.NoResponses
+ opts.IncludeParameters = !o.NoStruct
+ opts.IncludeURLBuilder = !o.NoURLBuilder
+}
+
// generate produces the operations selected via --name plus those selected
// via the shared --operation flag.
func (o *Operation) generate(opts *generator.GenOpts) error {
	return generator.GenerateServerOperation(append(o.Name, o.Operations.Operations...), opts)
}
+
// log prints post-generation instructions. rp (the target path) is unused
// here but kept to satisfy the common command interface.
func (o Operation) log(rp string) {

	log.Println(`Generation completed!

For this generation to compile you need to have some packages in your go.mod:

  * github.com/go-openapi/runtime

You can get these now with: go mod tidy`)
}
+
+// Execute generates a model file
+func (o *Operation) Execute(args []string) error {
+ if o.Shared.DumpData && len(append(o.Name, o.Operations.Operations...)) > 1 {
+ return errors.New("only 1 operation at a time is supported for dumping data")
+ }
+
+ return createSwagger(o)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/server.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/server.go
new file mode 100644
index 000000000..92495adde
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/server.go
@@ -0,0 +1,119 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "log"
+ "strings"
+
+ "github.com/go-swagger/go-swagger/generator"
+)
+
+type serverOptions struct {
+ ServerPackage string `long:"server-package" short:"s" description:"the package to save the server specific code" default:"restapi"`
+ MainTarget string `long:"main-package" short:"" description:"the location of the generated main. Defaults to cmd/{name}-server" default:""`
+ ImplementationPackage string `long:"implementation-package" short:"" description:"the location of the backend implementation of the server, which will be autowired with api" default:""`
+}
+
+func (cs serverOptions) apply(opts *generator.GenOpts) {
+ opts.ServerPackage = cs.ServerPackage
+}
+
+// Server the command to generate an entire server application
+type Server struct {
+ WithShared
+ WithModels
+ WithOperations
+
+ serverOptions
+ schemeOptions
+ mediaOptions
+
+ SkipModels bool `long:"skip-models" description:"no models will be generated when this flag is specified"`
+ SkipOperations bool `long:"skip-operations" description:"no operations will be generated when this flag is specified"`
+ SkipSupport bool `long:"skip-support" description:"no supporting files will be generated when this flag is specified"`
+ ExcludeMain bool `long:"exclude-main" description:"exclude main function, so just generate the library"`
+ ExcludeSpec bool `long:"exclude-spec" description:"don't embed the swagger specification"`
+ FlagStrategy string `long:"flag-strategy" description:"the strategy to provide flags for the server" default:"go-flags" choice:"go-flags" choice:"pflag" choice:"flag"` // nolint: staticcheck
+ CompatibilityMode string `long:"compatibility-mode" description:"the compatibility mode for the tls server" default:"modern" choice:"modern" choice:"intermediate"` // nolint: staticcheck
+ RegenerateConfigureAPI bool `long:"regenerate-configureapi" description:"Force regeneration of configureapi.go"`
+
+ Name string `long:"name" short:"A" description:"the name of the application, defaults to a mangled value of info.title"`
+ // TODO(fredbi): CmdName string `long:"cmd-name" short:"A" description:"the name of the server command, when main is generated (defaults to {name}-server)"`
+
+ // deprecated flags
+ WithContext bool `long:"with-context" description:"handlers get a context as first arg (deprecated)"`
+}
+
+func (s Server) apply(opts *generator.GenOpts) {
+ if s.WithContext {
+ log.Printf("warning: deprecated option --with-context is ignored")
+ }
+
+ s.Shared.apply(opts)
+ s.Models.apply(opts)
+ s.Operations.apply(opts)
+ s.serverOptions.apply(opts)
+ s.schemeOptions.apply(opts)
+ s.mediaOptions.apply(opts)
+
+ opts.IncludeModel = !s.SkipModels
+ opts.IncludeValidator = !s.SkipModels
+ opts.IncludeHandler = !s.SkipOperations
+ opts.IncludeParameters = !s.SkipOperations
+ opts.IncludeResponses = !s.SkipOperations
+ opts.IncludeURLBuilder = !s.SkipOperations
+ opts.IncludeSupport = !s.SkipSupport
+ opts.IncludeMain = !s.ExcludeMain
+ opts.ExcludeSpec = s.ExcludeSpec
+ opts.FlagStrategy = s.FlagStrategy
+ opts.CompatibilityMode = s.CompatibilityMode
+ opts.RegenerateConfigureAPI = s.RegenerateConfigureAPI
+
+ opts.Name = s.Name
+ opts.MainPackage = s.MainTarget
+
+ opts.ImplementationPackage = s.ImplementationPackage
+}
+
+func (s *Server) generate(opts *generator.GenOpts) error {
+ return generator.GenerateServer(s.Name, s.Models.Models, s.Operations.Operations, opts)
+}
+
+func (s Server) log(rp string) {
+ var flagsPackage string
+ switch {
+ case strings.HasPrefix(s.FlagStrategy, "pflag"):
+ flagsPackage = "github.com/spf13/pflag"
+ case strings.HasPrefix(s.FlagStrategy, "flag"):
+ flagsPackage = "flag"
+ default:
+ flagsPackage = "github.com/jessevdk/go-flags"
+ }
+
+ log.Println(`Generation completed!
+
+For this generation to compile you need to have some packages in your go.mod:
+
+ * github.com/go-openapi/runtime
+ * ` + flagsPackage + `
+
+You can get these now with: go mod tidy`)
+}
+
+// Execute runs this command
+func (s *Server) Execute(args []string) error {
+ return createSwagger(s)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/shared.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/shared.go
new file mode 100644
index 000000000..ab9725a7c
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/shared.go
@@ -0,0 +1,240 @@
+package generate
+
+import (
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/swag"
+ "github.com/go-swagger/go-swagger/generator"
+ flags "github.com/jessevdk/go-flags"
+ "github.com/spf13/viper"
+)
+
+// FlattenCmdOptions determines options to the flatten spec preprocessing
+type FlattenCmdOptions struct {
+ WithExpand bool `long:"with-expand" description:"expands all $ref's in spec prior to generation (shorthand to --with-flatten=expand)" group:"shared"`
+ WithFlatten []string `long:"with-flatten" description:"flattens all $ref's in spec prior to generation" choice:"minimal" choice:"full" choice:"expand" choice:"verbose" choice:"noverbose" choice:"remove-unused" default:"minimal" default:"verbose" group:"shared"` // nolint: staticcheck
+}
+
+// SetFlattenOptions builds flatten options from command line args
+func (f *FlattenCmdOptions) SetFlattenOptions(dflt *analysis.FlattenOpts) (res *analysis.FlattenOpts) {
+ res = &analysis.FlattenOpts{}
+ if dflt != nil {
+ *res = *dflt
+ }
+ if f == nil {
+ return
+ }
+ verboseIsSet := false
+ minimalIsSet := false
+ expandIsSet := false
+ if f.WithExpand {
+ res.Expand = true
+ expandIsSet = true
+ }
+ for _, opt := range f.WithFlatten {
+ switch opt {
+ case "verbose":
+ res.Verbose = true
+ verboseIsSet = true
+ case "noverbose":
+ if !verboseIsSet {
+ // verbose flag takes precedence
+ res.Verbose = false
+ verboseIsSet = true
+ }
+ case "remove-unused":
+ res.RemoveUnused = true
+ case "expand":
+ res.Expand = true
+ expandIsSet = true
+ case "full":
+ if !minimalIsSet && !expandIsSet {
+ // minimal flag takes precedence
+ res.Minimal = false
+ minimalIsSet = true
+ }
+ case "minimal":
+ if !expandIsSet {
+ // expand flag takes precedence
+ res.Minimal = true
+ minimalIsSet = true
+ }
+ }
+ }
+ return
+}
+
+type sharedCommand interface {
+ apply(*generator.GenOpts)
+ getConfigFile() string
+ generate(*generator.GenOpts) error
+ log(string)
+}
+
+type schemeOptions struct {
+ Principal string `short:"P" long:"principal" description:"the model to use for the security principal"`
+ DefaultScheme string `long:"default-scheme" description:"the default scheme for this API" default:"http"`
+
+ PrincipalIface bool `long:"principal-is-interface" description:"the security principal provided is an interface, not a struct"`
+}
+
+func (so schemeOptions) apply(opts *generator.GenOpts) {
+ opts.Principal = so.Principal
+ opts.PrincipalCustomIface = so.PrincipalIface
+ opts.DefaultScheme = so.DefaultScheme
+}
+
+type mediaOptions struct {
+ DefaultProduces string `long:"default-produces" description:"the default mime type that API operations produce" default:"application/json"`
+ DefaultConsumes string `long:"default-consumes" description:"the default mime type that API operations consume" default:"application/json"`
+}
+
+func (m mediaOptions) apply(opts *generator.GenOpts) {
+ opts.DefaultProduces = m.DefaultProduces
+ opts.DefaultConsumes = m.DefaultConsumes
+
+ const xmlIdentifier = "xml"
+ opts.WithXML = strings.Contains(opts.DefaultProduces, xmlIdentifier) || strings.Contains(opts.DefaultConsumes, xmlIdentifier)
+}
+
+// WithShared adds the shared options group
+type WithShared struct {
+ Shared sharedOptions `group:"Options common to all code generation commands"`
+}
+
+func (w WithShared) getConfigFile() string {
+ return string(w.Shared.ConfigFile)
+}
+
+type sharedOptionsCommon struct {
+ Spec flags.Filename `long:"spec" short:"f" description:"the spec file to use (default swagger.{json,yml,yaml})" group:"shared"`
+ Target flags.Filename `long:"target" short:"t" default:"./" description:"the base directory for generating the files" group:"shared"`
+ Template string `long:"template" description:"load contributed templates" choice:"stratoscale" group:"shared"`
+ TemplateDir flags.Filename `long:"template-dir" short:"T" description:"alternative template override directory" group:"shared"`
+ ConfigFile flags.Filename `long:"config-file" short:"C" description:"configuration file to use for overriding template options" group:"shared"`
+ CopyrightFile flags.Filename `long:"copyright-file" short:"r" description:"copyright file used to add copyright header" group:"shared"`
+	AdditionalInitialisms []string   `long:"additional-initialism" description:"consecutive capitals that should be considered initialisms" group:"shared"`
+ AllowTemplateOverride bool `long:"allow-template-override" description:"allows overriding protected templates" group:"shared"`
+ SkipValidation bool `long:"skip-validation" description:"skips validation of spec prior to generation" group:"shared"`
+ DumpData bool `long:"dump-data" description:"when present dumps the json for the template generator instead of generating files" group:"shared"`
+ StrictResponders bool `long:"strict-responders" description:"Use strict type for the handler return value"`
+ FlattenCmdOptions
+}
+
+func (s sharedOptionsCommon) apply(opts *generator.GenOpts) {
+ opts.Spec = string(s.Spec)
+ opts.Target = string(s.Target)
+ opts.Template = s.Template
+ opts.TemplateDir = string(s.TemplateDir)
+ opts.AllowTemplateOverride = s.AllowTemplateOverride
+ opts.ValidateSpec = !s.SkipValidation
+ opts.DumpData = s.DumpData
+ opts.FlattenOpts = s.FlattenCmdOptions.SetFlattenOptions(opts.FlattenOpts)
+ opts.Copyright = string(s.CopyrightFile)
+ opts.StrictResponders = s.StrictResponders
+
+ swag.AddInitialisms(s.AdditionalInitialisms...)
+}
+
+func setCopyright(copyrightFile string) (string, error) {
+ // read the Copyright from file path in opts
+ if copyrightFile == "" {
+ return "", nil
+ }
+ bytebuffer, err := os.ReadFile(copyrightFile)
+ if err != nil {
+ return "", err
+ }
+ return string(bytebuffer), nil
+}
+
+func createSwagger(s sharedCommand) error {
+ cfg, err := readConfig(s.getConfigFile())
+ if err != nil {
+ return err
+ }
+ setDebug(cfg) // viper config Debug
+
+ opts := new(generator.GenOpts)
+ s.apply(opts)
+
+ opts.Copyright, err = setCopyright(opts.Copyright)
+ if err != nil {
+ return fmt.Errorf("could not load copyright file: %v", err)
+ }
+
+ if opts.Template != "" {
+ contribOptionsOverride(opts)
+ }
+
+ if err = opts.EnsureDefaults(); err != nil {
+ return err
+ }
+
+ if err = configureOptsFromConfig(cfg, opts); err != nil {
+ return err
+ }
+
+ if err = s.generate(opts); err != nil {
+ return err
+ }
+
+ basepath, err := filepath.Abs(".")
+ if err != nil {
+ return err
+ }
+
+ targetAbs, err := filepath.Abs(opts.Target)
+ if err != nil {
+ return err
+ }
+ rp, err := filepath.Rel(basepath, targetAbs)
+ if err != nil {
+ return err
+ }
+
+ s.log(rp)
+
+ return nil
+}
+
+func readConfig(filename string) (*viper.Viper, error) {
+ if filename == "" {
+ return nil, nil
+ }
+
+ abspath, err := filepath.Abs(filename)
+ if err != nil {
+ return nil, err
+ }
+ log.Println("trying to read config from", abspath)
+ return generator.ReadConfig(abspath)
+}
+
+func configureOptsFromConfig(cfg *viper.Viper, opts *generator.GenOpts) error {
+ if cfg == nil {
+ return nil
+ }
+
+ var def generator.LanguageDefinition
+ if err := cfg.Unmarshal(&def); err != nil {
+ return err
+ }
+ return def.ConfigureOpts(opts)
+}
+
+func setDebug(cfg *viper.Viper) {
+ // viper config debug
+ if os.Getenv("DEBUG") != "" || os.Getenv("SWAGGER_DEBUG") != "" {
+ if cfg != nil {
+ cfg.Debug()
+ } else {
+ log.Println("No config read")
+ }
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_nonwin.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_nonwin.go
new file mode 100644
index 000000000..7f7c25187
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_nonwin.go
@@ -0,0 +1,19 @@
+//go:build !windows
+// +build !windows
+
+package generate
+
+import (
+ "github.com/go-swagger/go-swagger/generator"
+ "github.com/jessevdk/go-flags"
+)
+
+type sharedOptions struct {
+ sharedOptionsCommon
+ TemplatePlugin flags.Filename `long:"template-plugin" short:"p" description:"the template plugin to use" group:"shared"`
+}
+
+func (s sharedOptions) apply(opts *generator.GenOpts) {
+ opts.TemplatePlugin = string(s.TemplatePlugin)
+ s.sharedOptionsCommon.apply(opts)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_win.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_win.go
new file mode 100644
index 000000000..b2cf00f91
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_win.go
@@ -0,0 +1,8 @@
+//go:build windows
+// +build windows
+
+package generate
+
+type sharedOptions struct {
+ sharedOptionsCommon
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec.go
new file mode 100644
index 000000000..3e16789b6
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec.go
@@ -0,0 +1,125 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-swagger/go-swagger/scan"
+ "github.com/jessevdk/go-flags"
+ "gopkg.in/yaml.v3"
+)
+
+// SpecFile command to generate a swagger spec from a go application
+type SpecFile struct {
+ BasePath string `long:"base-path" short:"b" description:"the base path to use" default:"."`
+ BuildTags string `long:"tags" short:"t" description:"build tags" default:""`
+ ScanModels bool `long:"scan-models" short:"m" description:"includes models that were annotated with 'swagger:model'"`
+ Compact bool `long:"compact" description:"when present, doesn't prettify the json"`
+ Output flags.Filename `long:"output" short:"o" description:"the file to write to"`
+ Input flags.Filename `long:"input" short:"i" description:"an input swagger file with which to merge"`
+ Include []string `long:"include" short:"c" description:"include packages matching pattern"`
+ Exclude []string `long:"exclude" short:"x" description:"exclude packages matching pattern"`
+ IncludeTags []string `long:"include-tag" short:"" description:"include routes having specified tags (can be specified many times)"`
+ ExcludeTags []string `long:"exclude-tag" short:"" description:"exclude routes having specified tags (can be specified many times)"`
+}
+
+// Execute runs this command
+func (s *SpecFile) Execute(args []string) error {
+ input, err := loadSpec(string(s.Input))
+ if err != nil {
+ return err
+ }
+
+ var opts scan.Opts
+ opts.BasePath = s.BasePath
+ opts.Input = input
+ opts.ScanModels = s.ScanModels
+ opts.BuildTags = s.BuildTags
+ opts.Include = s.Include
+ opts.Exclude = s.Exclude
+ opts.IncludeTags = s.IncludeTags
+ opts.ExcludeTags = s.ExcludeTags
+ swspec, err := scan.Application(opts)
+ if err != nil {
+ return err
+ }
+
+ return writeToFile(swspec, !s.Compact, string(s.Output))
+}
+
+func loadSpec(input string) (*spec.Swagger, error) {
+ if fi, err := os.Stat(input); err == nil {
+ if fi.IsDir() {
+ return nil, fmt.Errorf("expected %q to be a file not a directory", input)
+ }
+ sp, err := loads.Spec(input)
+ if err != nil {
+ return nil, err
+ }
+ return sp.Spec(), nil
+ }
+ return nil, nil
+}
+
+func writeToFile(swspec *spec.Swagger, pretty bool, output string) error {
+ var b []byte
+ var err error
+
+ if strings.HasSuffix(output, "yml") || strings.HasSuffix(output, "yaml") {
+ b, err = marshalToYAMLFormat(swspec)
+ } else {
+ b, err = marshalToJSONFormat(swspec, pretty)
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if output == "" {
+ fmt.Println(string(b))
+ return nil
+ }
+ return os.WriteFile(output, b, 0644)
+}
+
+func marshalToJSONFormat(swspec *spec.Swagger, pretty bool) ([]byte, error) {
+ if pretty {
+ return json.MarshalIndent(swspec, "", " ")
+ }
+ return json.Marshal(swspec)
+}
+
+func marshalToYAMLFormat(swspec *spec.Swagger) ([]byte, error) {
+ b, err := json.Marshal(swspec)
+ if err != nil {
+ return nil, err
+ }
+
+ var jsonObj interface{}
+ if err := yaml.Unmarshal(b, &jsonObj); err != nil {
+ return nil, err
+ }
+
+ return yaml.Marshal(jsonObj)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec_go111.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec_go111.go
new file mode 100644
index 000000000..bf2295864
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec_go111.go
@@ -0,0 +1,119 @@
+//go:build go1.11
+// +build go1.11
+
+package generate
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/go-swagger/go-swagger/codescan"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/jessevdk/go-flags"
+ "gopkg.in/yaml.v3"
+)
+
+// SpecFile command to generate a swagger spec from a go application
+type SpecFile struct {
+ WorkDir string `long:"work-dir" short:"w" description:"the base path to use" default:"."`
+ BuildTags string `long:"tags" short:"t" description:"build tags" default:""`
+ ScanModels bool `long:"scan-models" short:"m" description:"includes models that were annotated with 'swagger:model'"`
+ Compact bool `long:"compact" description:"when present, doesn't prettify the json"`
+ Output flags.Filename `long:"output" short:"o" description:"the file to write to"`
+ Input flags.Filename `long:"input" short:"i" description:"an input swagger file with which to merge"`
+ Include []string `long:"include" short:"c" description:"include packages matching pattern"`
+ Exclude []string `long:"exclude" short:"x" description:"exclude packages matching pattern"`
+ IncludeTags []string `long:"include-tag" short:"" description:"include routes having specified tags (can be specified many times)"`
+ ExcludeTags []string `long:"exclude-tag" short:"" description:"exclude routes having specified tags (can be specified many times)"`
+ ExcludeDeps bool `long:"exclude-deps" short:"" description:"exclude all dependencies of project"`
+}
+
+// Execute runs this command
+func (s *SpecFile) Execute(args []string) error {
+ if len(args) == 0 { // by default consider all the paths under the working directory
+ args = []string{"./..."}
+ }
+
+ input, err := loadSpec(string(s.Input))
+ if err != nil {
+ return err
+ }
+
+ var opts codescan.Options
+ opts.Packages = args
+ opts.WorkDir = s.WorkDir
+ opts.InputSpec = input
+ opts.ScanModels = s.ScanModels
+ opts.BuildTags = s.BuildTags
+ opts.Include = s.Include
+ opts.Exclude = s.Exclude
+ opts.IncludeTags = s.IncludeTags
+ opts.ExcludeTags = s.ExcludeTags
+ opts.ExcludeDeps = s.ExcludeDeps
+ swspec, err := codescan.Run(&opts)
+ if err != nil {
+ return err
+ }
+
+ return writeToFile(swspec, !s.Compact, string(s.Output))
+}
+
+func loadSpec(input string) (*spec.Swagger, error) {
+ if fi, err := os.Stat(input); err == nil {
+ if fi.IsDir() {
+ return nil, fmt.Errorf("expected %q to be a file not a directory", input)
+ }
+ sp, err := loads.Spec(input)
+ if err != nil {
+ return nil, err
+ }
+ return sp.Spec(), nil
+ }
+ return nil, nil
+}
+
+func writeToFile(swspec *spec.Swagger, pretty bool, output string) error {
+ var b []byte
+ var err error
+
+ if strings.HasSuffix(output, "yml") || strings.HasSuffix(output, "yaml") {
+ b, err = marshalToYAMLFormat(swspec)
+ } else {
+ b, err = marshalToJSONFormat(swspec, pretty)
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if output == "" {
+ fmt.Println(string(b))
+ return nil
+ }
+ return os.WriteFile(output, b, 0644) // #nosec
+}
+
+func marshalToJSONFormat(swspec *spec.Swagger, pretty bool) ([]byte, error) {
+ if pretty {
+ return json.MarshalIndent(swspec, "", " ")
+ }
+ return json.Marshal(swspec)
+}
+
+func marshalToYAMLFormat(swspec *spec.Swagger) ([]byte, error) {
+ b, err := json.Marshal(swspec)
+ if err != nil {
+ return nil, err
+ }
+
+ var jsonObj interface{}
+ if err := yaml.Unmarshal(b, &jsonObj); err != nil {
+ return nil, err
+ }
+
+ return yaml.Marshal(jsonObj)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/support.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/support.go
new file mode 100644
index 000000000..9e52f428c
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/support.go
@@ -0,0 +1,67 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "log"
+
+ "github.com/go-swagger/go-swagger/generator"
+)
+
+// Support generates the supporting files
+type Support struct {
+ WithShared
+ WithModels
+ WithOperations
+
+ clientOptions
+ serverOptions
+ schemeOptions
+ mediaOptions
+
+ Name string `long:"name" short:"A" description:"the name of the application, defaults to a mangled value of info.title"`
+}
+
+func (s *Support) apply(opts *generator.GenOpts) {
+ s.Shared.apply(opts)
+ s.Models.apply(opts)
+ s.Operations.apply(opts)
+ s.clientOptions.apply(opts)
+ s.serverOptions.apply(opts)
+ s.schemeOptions.apply(opts)
+ s.mediaOptions.apply(opts)
+}
+
+func (s *Support) generate(opts *generator.GenOpts) error {
+ return generator.GenerateSupport(s.Name, s.Models.Models, s.Operations.Operations, opts)
+}
+
+func (s Support) log(rp string) {
+
+ log.Println(`Generation completed!
+
+For this generation to compile you need to have some packages in go.mod:
+
+ * github.com/go-openapi/runtime
+ * github.com/asaskevich/govalidator
+ * github.com/jessevdk/go-flags
+
+You can get these now with: go mod tidy`)
+}
+
+// Execute generates the supporting files file
+func (s *Support) Execute(args []string) error {
+ return createSwagger(s)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd.go
new file mode 100644
index 000000000..7a992f2b7
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd.go
@@ -0,0 +1,13 @@
+package commands
+
+import "github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd"
+
+// InitCmd is a command namespace for initializing things like a swagger spec.
+type InitCmd struct {
+ Model *initcmd.Spec `command:"spec"`
+}
+
+// Execute provides default empty implementation
+func (i *InitCmd) Execute(args []string) error {
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd/spec.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd/spec.go
new file mode 100644
index 000000000..c540dc5b4
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd/spec.go
@@ -0,0 +1,111 @@
+package initcmd
+
+import (
+ "encoding/json"
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+
+ "gopkg.in/yaml.v3"
+
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+// Spec a command struct for initializing a new swagger application.
+type Spec struct {
+ Format string `long:"format" description:"the format for the spec document" default:"yaml" choice:"yaml" choice:"json"`
+ Title string `long:"title" description:"the title of the API"`
+ Description string `long:"description" description:"the description of the API"`
+ Version string `long:"version" description:"the version of the API" default:"0.1.0"`
+ Terms string `long:"terms" description:"the terms of services"`
+ Consumes []string `long:"consumes" description:"add a content type to the global consumes definitions, can repeat" default:"application/json"`
+ Produces []string `long:"produces" description:"add a content type to the global produces definitions, can repeat" default:"application/json"`
+ Schemes []string `long:"scheme" description:"add a scheme to the global schemes definition, can repeat" default:"http"`
+ Contact struct {
+ Name string `long:"contact.name" description:"name of the primary contact for the API"`
+ URL string `long:"contact.url" description:"url of the primary contact for the API"`
+ Email string `long:"contact.email" description:"email of the primary contact for the API"`
+ }
+ License struct {
+ Name string `long:"license.name" description:"name of the license for the API"`
+ URL string `long:"license.url" description:"url of the license for the API"`
+ }
+}
+
+// Execute this command
+func (s *Spec) Execute(args []string) error {
+ targetPath := "."
+ if len(args) > 0 {
+ targetPath = args[0]
+ }
+ realPath, err := filepath.Abs(targetPath)
+ if err != nil {
+ return err
+ }
+ var file *os.File
+ switch s.Format {
+ case "json":
+ file, err = os.Create(filepath.Join(realPath, "swagger.json"))
+ if err != nil {
+ return err
+ }
+ case "yaml", "yml":
+ file, err = os.Create(filepath.Join(realPath, "swagger.yml"))
+ if err != nil {
+ return err
+ }
+ default:
+ return fmt.Errorf("invalid format: %s", s.Format)
+ }
+ defer file.Close()
+ log.Println("creating specification document in", filepath.Join(targetPath, file.Name()))
+
+ var doc spec.Swagger
+ info := new(spec.Info)
+ doc.Info = info
+
+ doc.Swagger = "2.0"
+ doc.Paths = new(spec.Paths)
+ doc.Definitions = make(spec.Definitions)
+
+ info.Title = s.Title
+ if info.Title == "" {
+ info.Title = swag.ToHumanNameTitle(filepath.Base(realPath))
+ }
+ info.Description = s.Description
+ info.Version = s.Version
+ info.TermsOfService = s.Terms
+ if s.Contact.Name != "" || s.Contact.Email != "" || s.Contact.URL != "" {
+ var contact spec.ContactInfo
+ contact.Name = s.Contact.Name
+ contact.Email = s.Contact.Email
+ contact.URL = s.Contact.URL
+ info.Contact = &contact
+ }
+ if s.License.Name != "" || s.License.URL != "" {
+ var license spec.License
+ license.Name = s.License.Name
+ license.URL = s.License.URL
+ info.License = &license
+ }
+
+ doc.Consumes = append(doc.Consumes, s.Consumes...)
+ doc.Produces = append(doc.Produces, s.Produces...)
+ doc.Schemes = append(doc.Schemes, s.Schemes...)
+
+ if s.Format == "json" {
+ enc := json.NewEncoder(file)
+ return enc.Encode(doc)
+ }
+
+ b, err := yaml.Marshal(swag.ToDynamicJSON(doc))
+ if err != nil {
+ return err
+ }
+ if _, err := file.Write(b); err != nil {
+ return err
+ }
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/mixin.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/mixin.go
new file mode 100644
index 000000000..79e26c440
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/mixin.go
@@ -0,0 +1,117 @@
+package commands
+
+import (
+ "errors"
+ "io"
+ "log"
+ "os"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ flags "github.com/jessevdk/go-flags"
+
+ "github.com/go-swagger/go-swagger/generator"
+)
+
+const (
+ // Output messages
+ nothingToDo = "nothing to do. Need some swagger files to merge.\nUSAGE: swagger mixin [-c <expected#Collisions>] <primary-swagger-file> <mixin-swagger-file...>"
+ ignoreConflictsAndCollisionsSpecified = "both the flags ignore conflicts and collisions were specified. These have conflicting meaning so please only specify one"
+)
+
+// MixinSpec holds command line flag definitions specific to the mixin
+// command. The flags are defined using struct field tags with the
+// "github.com/jessevdk/go-flags" format.
+type MixinSpec struct {
+ ExpectedCollisionCount uint `short:"c" description:"expected # of rejected mixin paths, defs, etc due to existing key. Non-zero exit if does not match actual."`
+ Compact bool `long:"compact" description:"applies to JSON formatted specs. When present, doesn't prettify the json"`
+ Output flags.Filename `long:"output" short:"o" description:"the file to write to"`
+ KeepSpecOrder bool `long:"keep-spec-order" description:"Keep schema properties order identical to spec file"`
+ Format string `long:"format" description:"the format for the spec document" default:"json" choice:"yaml" choice:"json"`
+ IgnoreConflicts bool `long:"ignore-conflicts" description:"Ignore conflict"`
+}
+
+// Execute runs the mixin command which merges Swagger 2.0 specs into
+// one spec
+//
+// Use cases include adding independently versioned metadata APIs to
+// application APIs for microservices.
+//
+// Typically, multiple APIs to the same service instance is not a
+// problem for client generation as you can create more than one
+// client to the service from the same calling process (one for each
+// API). However, merging clients can improve clarity of client code
+// by having a single client to given service vs several.
+//
+// Server skeleton generation, ie generating the model & marshaling
+// code, http server instance etc. from Swagger, becomes easier with a
+// merged spec for some tools & target-languages. Server code
+// generation tools that natively support hosting multiple specs in
+// one server process will not need this tool.
+func (c *MixinSpec) Execute(args []string) error {
+
+ if len(args) < 2 {
+ return errors.New(nothingToDo)
+ }
+ if c.IgnoreConflicts && c.ExpectedCollisionCount != 0 {
+ return errors.New(ignoreConflictsAndCollisionsSpecified)
+ }
+
+ log.Printf("args[0] = %v\n", args[0])
+ log.Printf("args[1:] = %v\n", args[1:])
+ collisions, err := c.MixinFiles(args[0], args[1:], os.Stdout)
+
+ for _, warn := range collisions {
+ log.Println(warn)
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if c.IgnoreConflicts {
+ return nil
+ }
+ if len(collisions) != int(c.ExpectedCollisionCount) {
+ if len(collisions) != 0 {
+ // use bash $? to get actual # collisions
+ // (but has to be non-zero)
+ os.Exit(len(collisions))
+ }
+ os.Exit(254)
+ }
+ return nil
+}
+
+// MixinFiles is a convenience function for Mixin that reads the given
+// swagger files, adds the mixins to primary, calls
+// FixEmptyResponseDescriptions on the primary, and writes the primary
+// with mixins to the given writer in JSON. Returns the warning
+// messages for collisions that occurred during mixin process and any
+// error.
+func (c *MixinSpec) MixinFiles(primaryFile string, mixinFiles []string, w io.Writer) ([]string, error) {
+
+ primaryDoc, err := loads.Spec(primaryFile)
+ if err != nil {
+ return nil, err
+ }
+ primary := primaryDoc.Spec()
+
+ var mixins []*spec.Swagger
+ for _, mixinFile := range mixinFiles {
+ if c.KeepSpecOrder {
+ mixinFile = generator.WithAutoXOrder(mixinFile)
+ }
+ mixin, lerr := loads.Spec(mixinFile)
+ if lerr != nil {
+ return nil, lerr
+ }
+ mixins = append(mixins, mixin.Spec())
+ }
+
+ collisions := analysis.Mixin(primary, mixins...)
+ analysis.FixEmptyResponseDescriptions(primary)
+
+ return collisions, writeToFile(primary, !c.Compact, c.Format, string(c.Output))
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/serve.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/serve.go
new file mode 100644
index 000000000..aeea4cedd
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/serve.go
@@ -0,0 +1,117 @@
+package commands
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log"
+ "net"
+ "net/http"
+ "path"
+ "strconv"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/runtime/middleware"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+ "github.com/gorilla/handlers"
+ "github.com/toqueteos/webbrowser"
+)
+
// ServeCmd to serve a swagger spec with docs ui
type ServeCmd struct {
	// BasePath is the base path the spec and UI are served under.
	BasePath string `long:"base-path" description:"the base path to serve the spec and UI at"`
	// Flavor selects the documentation renderer.
	Flavor string `short:"F" long:"flavor" description:"the flavor of docs, can be swagger or redoc" default:"redoc" choice:"redoc" choice:"swagger"`
	// DocURL overrides the URL used to render the doc UI.
	DocURL string `long:"doc-url" description:"override the url which takes a url query param to render the doc ui"`
	// NoOpen suppresses opening a browser (fixed help-text typo "the the").
	NoOpen bool `long:"no-open" description:"when present won't open the browser to show the url"`
	NoUI   bool `long:"no-ui" description:"when present, only the swagger spec will be served"`
	// Flatten pre-processes the spec before serving it.
	Flatten bool `long:"flatten" description:"when present, flatten the swagger spec before serving it"`
	Port    int  `long:"port" short:"p" description:"the port to serve this site" env:"PORT"`
	Host    string `long:"host" description:"the interface to serve this site, defaults to 0.0.0.0" default:"0.0.0.0" env:"HOST"`
	// Path is the URI path at which the docs UI is mounted.
	Path string `long:"path" description:"the uri path at which the docs will be served" default:"docs"`
}
+
+// Execute the serve command
+func (s *ServeCmd) Execute(args []string) error {
+ if len(args) == 0 {
+ return errors.New("specify the spec to serve as argument to the serve command")
+ }
+
+ specDoc, err := loads.Spec(args[0])
+ if err != nil {
+ return err
+ }
+
+ if s.Flatten {
+ specDoc, err = specDoc.Expanded(&spec.ExpandOptions{
+ SkipSchemas: false,
+ ContinueOnError: true,
+ AbsoluteCircularRef: true,
+ })
+
+ if err != nil {
+ return err
+ }
+ }
+
+ b, err := json.MarshalIndent(specDoc.Spec(), "", " ")
+ if err != nil {
+ return err
+ }
+
+ basePath := s.BasePath
+ if basePath == "" {
+ basePath = "/"
+ }
+
+ listener, err := net.Listen("tcp4", net.JoinHostPort(s.Host, strconv.Itoa(s.Port)))
+ if err != nil {
+ return err
+ }
+ sh, sp, err := swag.SplitHostPort(listener.Addr().String())
+ if err != nil {
+ return err
+ }
+ if sh == "0.0.0.0" {
+ sh = "localhost"
+ }
+
+ visit := s.DocURL
+ handler := http.NotFoundHandler()
+ if !s.NoUI {
+ if s.Flavor == "redoc" {
+ handler = middleware.Redoc(middleware.RedocOpts{
+ BasePath: basePath,
+ SpecURL: path.Join(basePath, "swagger.json"),
+ Path: s.Path,
+ }, handler)
+ visit = fmt.Sprintf("http://%s:%d%s", sh, sp, path.Join(basePath, "docs"))
+ } else if visit != "" || s.Flavor == "swagger" {
+ handler = middleware.SwaggerUI(middleware.SwaggerUIOpts{
+ BasePath: basePath,
+ SpecURL: path.Join(basePath, "swagger.json"),
+ Path: s.Path,
+ }, handler)
+ visit = fmt.Sprintf("http://%s:%d%s", sh, sp, path.Join(basePath, s.Path))
+ }
+ }
+
+ handler = handlers.CORS()(middleware.Spec(basePath, b, handler))
+ errFuture := make(chan error)
+ go func() {
+ docServer := new(http.Server)
+ docServer.SetKeepAlivesEnabled(true)
+ docServer.Handler = handler
+
+ errFuture <- docServer.Serve(listener)
+ }()
+
+ if !s.NoOpen && !s.NoUI {
+ err := webbrowser.Open(visit)
+ if err != nil {
+ return err
+ }
+ }
+ log.Println("serving docs at", visit)
+ return <-errFuture
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/validate.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/validate.go
new file mode 100644
index 000000000..220c8b853
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/validate.go
@@ -0,0 +1,83 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "errors"
+ "fmt"
+ "log"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/validate"
+)
+
const (
	// Output messages
	// missingArgMsg is returned when no spec path/url was given.
	missingArgMsg = "the validate command requires the swagger document url to be specified"
	// validSpecMsg is logged on success (args: spec path, swagger version).
	validSpecMsg = "\nThe swagger spec at %q is valid against swagger specification %s\n"
	// invalidSpecMsg prefixes the aggregated validation errors (args: spec path, swagger version).
	invalidSpecMsg = "\nThe swagger spec at %q is invalid against swagger specification %s.\nSee errors below:\n"
	// warningSpecMsg is logged when the spec validates but has warnings (arg: spec path).
	warningSpecMsg = "\nThe swagger spec at %q showed up some valid but possibly unwanted constructs."
)
+
// ValidateSpec is a command that validates a swagger document
// against the swagger specification
type ValidateSpec struct {
	// SchemaURL string `long:"schema" description:"The schema url to use" default:"http://swagger.io/v2/schema.json"`
	// SkipWarnings suppresses the warning listing in the output.
	SkipWarnings bool `long:"skip-warnings" description:"when present will not show up warnings upon validation"`
	// StopOnError aborts validation at the first critical error.
	StopOnError bool `long:"stop-on-error" description:"when present will not continue validation after critical errors are found"`
}
+
+// Execute validates the spec
+func (c *ValidateSpec) Execute(args []string) error {
+ if len(args) == 0 {
+ return errors.New(missingArgMsg)
+ }
+
+ swaggerDoc := args[0]
+
+ specDoc, err := loads.Spec(swaggerDoc)
+ if err != nil {
+ return err
+ }
+
+ // Attempts to report about all errors
+ validate.SetContinueOnErrors(!c.StopOnError)
+
+ v := validate.NewSpecValidator(specDoc.Schema(), strfmt.Default)
+ result, _ := v.Validate(specDoc) // returns fully detailed result with errors and warnings
+
+ if result.IsValid() {
+ log.Printf(validSpecMsg, swaggerDoc, specDoc.Version())
+ }
+ if result.HasWarnings() {
+ log.Printf(warningSpecMsg, swaggerDoc)
+ if !c.SkipWarnings {
+ log.Printf("See warnings below:\n")
+ for _, desc := range result.Warnings {
+ log.Printf("- WARNING: %s\n", desc.Error())
+ }
+ }
+ }
+ if result.HasErrors() {
+ str := fmt.Sprintf(invalidSpecMsg, swaggerDoc, specDoc.Version())
+ for _, desc := range result.Errors {
+ str += fmt.Sprintf("- %s\n", desc.Error())
+ }
+ return errors.New(str)
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/version.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/version.go
new file mode 100644
index 000000000..9a860653b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/version.go
@@ -0,0 +1,37 @@
+package commands
+
+import (
+ "fmt"
+ "runtime/debug"
+)
+
var (
	// Version for the swagger command, injected at release build time.
	Version string
	// Commit for the swagger command, injected at release build time.
	Commit string
)

// PrintVersion is the command that prints the build version information.
type PrintVersion struct {
}

// Execute prints the version and commit of this binary. Released builds
// report the injected Version/Commit; otherwise the module build info is
// used when available, falling back to "dev" for a local repo build.
func (p *PrintVersion) Execute(args []string) error {
	if Version != "" {
		// released version
		fmt.Println("version:", Version)
		fmt.Println("commit:", Commit)
		return nil
	}

	if info, available := debug.ReadBuildInfo(); available && info.Main.Version != "(devel)" {
		// built from source, with module metadata (e.g. go get / go install)
		fmt.Println("version:", info.Main.Version)
		fmt.Printf("commit: (unknown, mod sum: %q)\n", info.Main.Sum)
		return nil
	}

	// built from source, local repo
	fmt.Println("dev")
	return nil
}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/swagger.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/swagger.go
new file mode 100644
index 000000000..dfc89ba2e
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/swagger.go
@@ -0,0 +1,143 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "io"
+ "log"
+ "os"
+
+ "github.com/go-swagger/go-swagger/cmd/swagger/commands"
+ flags "github.com/jessevdk/go-flags"
+)
+
// opts holds the global flags shared by every subcommand; the
// function-typed fields are invoked by go-flags as soon as the
// corresponding flag is parsed.
var opts struct {
	// General options applicable to all commands
	Quiet func() `long:"quiet" short:"q" description:"silence logs"`
	LogFile func(string) `long:"log-output" description:"redirect logs to file" value-name:"LOG-FILE"`
	// Version bool `long:"version" short:"v" description:"print the version of the command"`
}
+
+func main() {
+ // TODO: reactivate 'defer catch all' once product is stable
+ // Recovering from internal panics
+ // Stack may be printed in Debug mode
+ // Need import "runtime/debug".
+ // defer func() {
+ // r := recover()
+ // if r != nil {
+ // log.Printf("Fatal error:", r)
+ // if Debug {
+ // debug.PrintStack()
+ // }
+ // os.Exit(1)
+ // }
+ // }()
+
+ parser := flags.NewParser(&opts, flags.Default)
+ parser.ShortDescription = "helps you keep your API well described"
+ parser.LongDescription = `
+Swagger tries to support you as best as possible when building APIs.
+
+It aims to represent the contract of your API with a language agnostic description of your application in json or yaml.
+`
+ _, err := parser.AddCommand("validate", "validate the swagger document", "validate the provided swagger document against a swagger spec", &commands.ValidateSpec{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("init", "initialize a spec document", "initialize a swagger spec document", &commands.InitCmd{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("version", "print the version", "print the version of the swagger command", &commands.PrintVersion{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("serve", "serve spec and docs", "serve a spec and swagger or redoc documentation ui", &commands.ServeCmd{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("expand", "expand $ref fields in a swagger spec", "expands the $refs in a swagger document to inline schemas", &commands.ExpandSpec{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("flatten", "flattens a swagger document", "expand the remote references in a spec and move inline schemas to definitions, after flattening there are no complex inlined anymore", &commands.FlattenSpec{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("mixin", "merge swagger documents", "merge additional specs into first/primary spec by copying their paths and definitions", &commands.MixinSpec{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("diff", "diff swagger documents", "diff specs showing which changes will break existing clients", &commands.DiffCommand{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ genpar, err := parser.AddCommand("generate", "generate go code", "generate go code for the swagger spec file", &commands.Generate{})
+ if err != nil {
+ log.Fatalln(err)
+ }
+ for _, cmd := range genpar.Commands() {
+ switch cmd.Name {
+ case "spec":
+ cmd.ShortDescription = "generate a swagger spec document from a go application"
+ cmd.LongDescription = cmd.ShortDescription
+ case "client":
+ cmd.ShortDescription = "generate all the files for a client library"
+ cmd.LongDescription = cmd.ShortDescription
+ case "server":
+ cmd.ShortDescription = "generate all the files for a server application"
+ cmd.LongDescription = cmd.ShortDescription
+ case "model":
+ cmd.ShortDescription = "generate one or more models from the swagger spec"
+ cmd.LongDescription = cmd.ShortDescription
+ case "support":
+ cmd.ShortDescription = "generate supporting files like the main function and the api builder"
+ cmd.LongDescription = cmd.ShortDescription
+ case "operation":
+ cmd.ShortDescription = "generate one or more server operations from the swagger spec"
+ cmd.LongDescription = cmd.ShortDescription
+ case "markdown":
+ cmd.ShortDescription = "generate a markdown representation from the swagger spec"
+ cmd.LongDescription = cmd.ShortDescription
+ case "cli":
+ cmd.ShortDescription = "generate a command line client tool from the swagger spec"
+ cmd.LongDescription = cmd.ShortDescription
+ }
+ }
+
+ opts.Quiet = func() {
+ log.SetOutput(io.Discard)
+ }
+ opts.LogFile = func(logfile string) {
+ f, err := os.OpenFile(logfile, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0666)
+ if err != nil {
+ log.Fatalf("cannot write to file %s: %v", logfile, err)
+ }
+ log.SetOutput(f)
+ }
+
+ if _, err := parser.Parse(); err != nil {
+ os.Exit(1)
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/README.md b/vendor/github.com/go-swagger/go-swagger/codescan/README.md
new file mode 100644
index 000000000..7468cda5b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/README.md
@@ -0,0 +1,3 @@
+# codescan
+
+Version of the Go source parser with support for Go modules, from Go 1.11 onwards.
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/application.go b/vendor/github.com/go-swagger/go-swagger/codescan/application.go
new file mode 100644
index 000000000..952d9fb1f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/application.go
@@ -0,0 +1,674 @@
+package codescan
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "log"
+ "os"
+ "strings"
+
+ "github.com/go-openapi/swag"
+
+ "golang.org/x/tools/go/packages"
+
+ "github.com/go-openapi/spec"
+)
+
// pkgLoadMode requests everything the scanner needs from the loader:
// names, files, imports, dependencies, type information, and syntax trees.
const pkgLoadMode = packages.NeedName | packages.NeedFiles | packages.NeedImports | packages.NeedDeps | packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo
+
+func safeConvert(str string) bool {
+ b, err := swag.ConvertBool(str)
+ if err != nil {
+ return false
+ }
+ return b
+}
+
// Debug is true when process is run with DEBUG=1 env var
var Debug = safeConvert(os.Getenv("DEBUG"))

// node is a bit mask of the swagger annotation kinds detected in a file.
type node uint32

const (
	metaNode node = 1 << iota // swagger:meta
	routeNode // swagger:route
	operationNode // swagger:operation
	modelNode // swagger:model
	parametersNode // swagger:parameters
	responseNode // swagger:response
)
+
// Options for the scanner
type Options struct {
	// Packages lists the package patterns to load.
	Packages []string
	// InputSpec is the spec the scan results are built on top of.
	InputSpec *spec.Swagger
	// ScanModels enables scanning of model annotations.
	ScanModels bool
	// WorkDir is the directory packages are loaded from.
	WorkDir string
	// BuildTags are passed to the package loader as -tags.
	BuildTags string
	// ExcludeDeps skips walking imported dependencies.
	ExcludeDeps bool
	// Include / Exclude filter package paths; IncludeTags / ExcludeTags
	// filter operations by tag.
	Include []string
	Exclude []string
	IncludeTags []string
	ExcludeTags []string
}
+
// scanCtx bundles the loaded packages with the type index built on them.
type scanCtx struct {
	pkgs []*packages.Package
	app  *typeIndex
}
+
// sliceToSet converts a slice of names into a membership set.
// The map is pre-sized to avoid rehashing while filling it.
func sliceToSet(names []string) map[string]bool {
	result := make(map[string]bool, len(names))
	for _, v := range names {
		result[v] = true
	}
	return result
}
+
+// Run the scanner to produce a spec with the options provided
+func Run(opts *Options) (*spec.Swagger, error) {
+ sc, err := newScanCtx(opts)
+ if err != nil {
+ return nil, err
+ }
+ sb := newSpecBuilder(opts.InputSpec, sc, opts.ScanModels)
+ return sb.Build()
+}
+
+func newScanCtx(opts *Options) (*scanCtx, error) {
+ cfg := &packages.Config{
+ Dir: opts.WorkDir,
+ Mode: pkgLoadMode,
+ Tests: false,
+ }
+ if opts.BuildTags != "" {
+ cfg.BuildFlags = []string{"-tags", opts.BuildTags}
+ }
+
+ pkgs, err := packages.Load(cfg, opts.Packages...)
+ if err != nil {
+ return nil, err
+ }
+
+ app, err := newTypeIndex(pkgs, opts.ExcludeDeps,
+ sliceToSet(opts.IncludeTags), sliceToSet(opts.ExcludeTags),
+ opts.Include, opts.Exclude)
+ if err != nil {
+ return nil, err
+ }
+
+ return &scanCtx{
+ pkgs: pkgs,
+ app: app,
+ }, nil
+}
+
// entityDecl ties a named type declaration to its syntax tree, package,
// and doc comments, and caches which swagger annotations were seen on it.
type entityDecl struct {
	Comments *ast.CommentGroup // doc comments attached to the type (or its decl group)
	Type *types.Named
	Ident *ast.Ident
	Spec *ast.TypeSpec
	File *ast.File
	Pkg *packages.Package
	// annotation flags, populated lazily while scanning the comments
	hasModelAnnotation bool
	hasResponseAnnotation bool
	hasParameterAnnotation bool
}
+
+func (d *entityDecl) Names() (name, goName string) {
+ goName = d.Ident.Name
+ name = goName
+ if d.Comments == nil {
+ return
+ }
+
+DECLS:
+ for _, cmt := range d.Comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxModelOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ d.hasModelAnnotation = true
+ }
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ name = matches[1]
+ break DECLS
+ }
+ }
+ }
+ return
+}
+
+func (d *entityDecl) ResponseNames() (name, goName string) {
+ goName = d.Ident.Name
+ name = goName
+ if d.Comments == nil {
+ return
+ }
+
+DECLS:
+ for _, cmt := range d.Comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxResponseOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ d.hasResponseAnnotation = true
+ }
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ name = matches[1]
+ break DECLS
+ }
+ }
+ }
+ return
+}
+
+func (d *entityDecl) OperationIDS() (result []string) {
+ if d == nil || d.Comments == nil {
+ return nil
+ }
+
+ for _, cmt := range d.Comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxParametersOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ d.hasParameterAnnotation = true
+ }
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ for _, pt := range strings.Split(matches[1], " ") {
+ tr := strings.TrimSpace(pt)
+ if len(tr) > 0 {
+ result = append(result, tr)
+ }
+ }
+ }
+ }
+ }
+ return
+}
+
+func (d *entityDecl) HasModelAnnotation() bool {
+ if d.hasModelAnnotation {
+ return true
+ }
+ if d.Comments == nil {
+ return false
+ }
+ for _, cmt := range d.Comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxModelOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ d.hasModelAnnotation = true
+ return true
+ }
+ }
+ }
+ return false
+}
+
+func (d *entityDecl) HasResponseAnnotation() bool {
+ if d.hasResponseAnnotation {
+ return true
+ }
+ if d.Comments == nil {
+ return false
+ }
+ for _, cmt := range d.Comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxResponseOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ d.hasResponseAnnotation = true
+ return true
+ }
+ }
+ }
+ return false
+}
+
+func (d *entityDecl) HasParameterAnnotation() bool {
+ if d.hasParameterAnnotation {
+ return true
+ }
+ if d.Comments == nil {
+ return false
+ }
+ for _, cmt := range d.Comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxParametersOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ d.hasParameterAnnotation = true
+ return true
+ }
+ }
+ }
+ return false
+}
+
// FindDecl locates the type declaration called name in package pkgPath
// by walking that package's syntax trees. Only named types are returned;
// the doc comment is taken from the TypeSpec itself, falling back to the
// enclosing GenDecl.
func (s *scanCtx) FindDecl(pkgPath, name string) (*entityDecl, bool) {
	if pkg, ok := s.app.AllPackages[pkgPath]; ok {
		for _, file := range pkg.Syntax {
			for _, d := range file.Decls {
				gd, ok := d.(*ast.GenDecl)
				if !ok {
					continue
				}

				for _, sp := range gd.Specs {
					if ts, ok := sp.(*ast.TypeSpec); ok && ts.Name.Name == name {
						def, ok := pkg.TypesInfo.Defs[ts.Name]
						if !ok {
							debugLog("couldn't find type info for %s", ts.Name)
							continue
						}
						// only named types carry the info needed downstream
						nt, isNamed := def.Type().(*types.Named)
						if !isNamed {
							debugLog("%s is not a named type but a %T", ts.Name, def.Type())
							continue
						}

						comments := ts.Doc // type ( /* doc */ Foo struct{} )
						if comments == nil {
							comments = gd.Doc // /* doc */ type ( Foo struct{} )
						}

						decl := &entityDecl{
							Comments: comments,
							Type:     nt,
							Ident:    ts.Name,
							Spec:     ts,
							File:     file,
							Pkg:      pkg,
						}
						return decl, true
					}

				}
			}
		}
	}
	return nil, false
}
+
+func (s *scanCtx) FindModel(pkgPath, name string) (*entityDecl, bool) {
+ for _, cand := range s.app.Models {
+ ct := cand.Type.Obj()
+ if ct.Name() == name && ct.Pkg().Path() == pkgPath {
+ return cand, true
+ }
+ }
+ if decl, found := s.FindDecl(pkgPath, name); found {
+ s.app.ExtraModels[decl.Ident] = decl
+ return decl, true
+ }
+ return nil, false
+}
+
+func (s *scanCtx) PkgForPath(pkgPath string) (*packages.Package, bool) {
+ v, ok := s.app.AllPackages[pkgPath]
+ return v, ok
+}
+
+func (s *scanCtx) DeclForType(t types.Type) (*entityDecl, bool) {
+ switch tpe := t.(type) {
+ case *types.Pointer:
+ return s.DeclForType(tpe.Elem())
+ case *types.Named:
+ return s.FindDecl(tpe.Obj().Pkg().Path(), tpe.Obj().Name())
+
+ default:
+ log.Printf("unknown type to find the package for [%T]: %s", t, t.String())
+ return nil, false
+ }
+}
+
+func (s *scanCtx) PkgForType(t types.Type) (*packages.Package, bool) {
+ switch tpe := t.(type) {
+ // case *types.Basic:
+ // case *types.Struct:
+ // case *types.Pointer:
+ // case *types.Interface:
+ // case *types.Array:
+ // case *types.Slice:
+ // case *types.Map:
+ case *types.Named:
+ v, ok := s.app.AllPackages[tpe.Obj().Pkg().Path()]
+ return v, ok
+ default:
+ log.Printf("unknown type to find the package for [%T]: %s", t, t.String())
+ return nil, false
+ }
+}
+
+func (s *scanCtx) FindComments(pkg *packages.Package, name string) (*ast.CommentGroup, bool) {
+ for _, f := range pkg.Syntax {
+ for _, d := range f.Decls {
+ gd, ok := d.(*ast.GenDecl)
+ if !ok {
+ continue
+ }
+
+ for _, s := range gd.Specs {
+ if ts, ok := s.(*ast.TypeSpec); ok {
+ if ts.Name.Name == name {
+ return gd.Doc, true
+ }
+ }
+ }
+ }
+ }
+ return nil, false
+}
+
// FindEnumValues collects the literal values of all constants declared
// with type enumName in pkg, together with one description string per
// value built from "<value> <const names> <doc comment text>".
// The boolean result is always true.
func (s *scanCtx) FindEnumValues(pkg *packages.Package, enumName string) (list []interface{}, descList []string, _ bool) {
	for _, f := range pkg.Syntax {
		for _, d := range f.Decls {
			gd, ok := d.(*ast.GenDecl)
			if !ok {
				continue
			}

			// enum values must be const declarations
			if gd.Tok != token.CONST {
				continue
			}

			for _, s := range gd.Specs {
				if vs, ok := s.(*ast.ValueSpec); ok {
					if vsIdent, ok := vs.Type.(*ast.Ident); ok {
						if vsIdent.Name == enumName {
							if len(vs.Values) > 0 {
								// only basic literals are supported; the first value covers the spec line
								if bl, ok := vs.Values[0].(*ast.BasicLit); ok {
									blValue := getEnumBasicLitValue(bl)
									list = append(list, blValue)

									// build the enum description
									var (
										desc     = &strings.Builder{}
										namesLen = len(vs.Names)
									)
									desc.WriteString(fmt.Sprintf("%v ", blValue))
									for i, name := range vs.Names {
										desc.WriteString(name.Name)
										if i < namesLen-1 {
											desc.WriteString(" ")
										}
									}
									if vs.Doc != nil {
										docListLen := len(vs.Doc.List)
										if docListLen > 0 {
											desc.WriteString(" ")
										}
										for i, doc := range vs.Doc.List {
											if doc.Text != "" {
												// strip the comment marker, keep the text
												var text = strings.TrimPrefix(doc.Text, "//")
												desc.WriteString(text)
												if i < docListLen-1 {
													desc.WriteString(" ")
												}
											}
										}
									}
									descList = append(descList, desc.String())
								}
							}
						}
					}
				}
			}
		}
	}
	return list, descList, true
}
+
+func newTypeIndex(pkgs []*packages.Package,
+ excludeDeps bool, includeTags, excludeTags map[string]bool,
+ includePkgs, excludePkgs []string) (*typeIndex, error) {
+
+ ac := &typeIndex{
+ AllPackages: make(map[string]*packages.Package),
+ Models: make(map[*ast.Ident]*entityDecl),
+ ExtraModels: make(map[*ast.Ident]*entityDecl),
+ excludeDeps: excludeDeps,
+ includeTags: includeTags,
+ excludeTags: excludeTags,
+ includePkgs: includePkgs,
+ excludePkgs: excludePkgs,
+ }
+ if err := ac.build(pkgs); err != nil {
+ return nil, err
+ }
+ return ac, nil
+}
+
// typeIndex aggregates everything the scanner found across packages:
// annotated declarations, parsed path annotations, and meta sections,
// plus the filter configuration used while building the index.
type typeIndex struct {
	AllPackages map[string]*packages.Package // every package visited, keyed by import path
	Models map[*ast.Ident]*entityDecl
	ExtraModels map[*ast.Ident]*entityDecl // models discovered on demand via FindModel
	Meta []metaSection
	Routes []parsedPathContent
	Operations []parsedPathContent
	Parameters []*entityDecl
	Responses []*entityDecl
	// filters applied during build
	excludeDeps bool
	includeTags map[string]bool
	excludeTags map[string]bool
	includePkgs []string
	excludePkgs []string
}
+
+func (a *typeIndex) build(pkgs []*packages.Package) error {
+ for _, pkg := range pkgs {
+ if _, known := a.AllPackages[pkg.PkgPath]; known {
+ continue
+ }
+ a.AllPackages[pkg.PkgPath] = pkg
+ if err := a.processPackage(pkg); err != nil {
+ return err
+ }
+ if err := a.walkImports(pkg); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
// processPackage scans one package: it classifies each file's comments
// into annotation kinds, collects meta/operation/route annotations, and
// indexes annotated type declarations — including types declared inside
// function bodies.
func (a *typeIndex) processPackage(pkg *packages.Package) error {
	if !shouldAcceptPkg(pkg.PkgPath, a.includePkgs, a.excludePkgs) {
		debugLog("package %s is ignored due to rules", pkg.Name)
		return nil
	}

	for _, file := range pkg.Syntax {
		n, err := a.detectNodes(file)
		if err != nil {
			return err
		}

		if n&metaNode != 0 {
			a.Meta = append(a.Meta, metaSection{Comments: file.Doc})
		}

		if n&operationNode != 0 {
			for _, cmts := range file.Comments {
				pp := parsePathAnnotation(rxOperation, cmts.List)
				if pp.Method == "" {
					continue // not a valid operation
				}
				if !shouldAcceptTag(pp.Tags, a.includeTags, a.excludeTags) {
					debugLog("operation %s %s is ignored due to tag rules", pp.Method, pp.Path)
					continue
				}
				a.Operations = append(a.Operations, pp)
			}
		}

		if n&routeNode != 0 {
			for _, cmts := range file.Comments {
				pp := parsePathAnnotation(rxRoute, cmts.List)
				if pp.Method == "" {
					continue // not a valid operation
				}
				if !shouldAcceptTag(pp.Tags, a.includeTags, a.excludeTags) {
					debugLog("operation %s %s is ignored due to tag rules", pp.Method, pp.Path)
					continue
				}
				a.Routes = append(a.Routes, pp)
			}
		}

		for _, dt := range file.Decls {
			switch fd := dt.(type) {
			case *ast.BadDecl:
				continue
			case *ast.FuncDecl:
				if fd.Body == nil {
					continue
				}
				// types may also be declared inside function bodies; index those too
				for _, stmt := range fd.Body.List {
					if dstm, ok := stmt.(*ast.DeclStmt); ok {
						if gd, isGD := dstm.Decl.(*ast.GenDecl); isGD {
							a.processDecl(pkg, file, n, gd)
						}
					}
				}
			case *ast.GenDecl:
				a.processDecl(pkg, file, n, fd)
			}
		}
	}
	return nil
}
+
// processDecl indexes the type specs of one declaration group. Annotated
// named types are recorded in the model/parameter/response indexes, but
// only when the file-level node mask n says that annotation kind is
// present in the file. Value and import specs end processing early
// (a GenDecl's specs are all of the same kind).
func (a *typeIndex) processDecl(pkg *packages.Package, file *ast.File, n node, gd *ast.GenDecl) {
	for _, sp := range gd.Specs {
		switch ts := sp.(type) {
		case *ast.ValueSpec:
			debugLog("saw value spec: %v", ts.Names)
			return
		case *ast.ImportSpec:
			debugLog("saw import spec: %v", ts.Name)
			return
		case *ast.TypeSpec:
			def, ok := pkg.TypesInfo.Defs[ts.Name]
			if !ok {
				debugLog("couldn't find type info for %s", ts.Name)
				continue
			}
			// only named types carry the info needed downstream
			nt, isNamed := def.Type().(*types.Named)
			if !isNamed {
				debugLog("%s is not a named type but a %T", ts.Name, def.Type())
				continue
			}

			comments := ts.Doc // type ( /* doc */ Foo struct{} )
			if comments == nil {
				comments = gd.Doc // /* doc */ type ( Foo struct{} )
			}

			decl := &entityDecl{
				Comments: comments,
				Type:     nt,
				Ident:    ts.Name,
				Spec:     ts,
				File:     file,
				Pkg:      pkg,
			}
			key := ts.Name
			if n&modelNode != 0 && decl.HasModelAnnotation() {
				a.Models[key] = decl
			}
			if n&parametersNode != 0 && decl.HasParameterAnnotation() {
				a.Parameters = append(a.Parameters, decl)
			}
			if n&responseNode != 0 && decl.HasResponseAnnotation() {
				a.Responses = append(a.Responses, decl)
			}
		}
	}
}
+
+func (a *typeIndex) walkImports(pkg *packages.Package) error {
+ if a.excludeDeps {
+ return nil
+ }
+ for _, v := range pkg.Imports {
+ if _, known := a.AllPackages[v.PkgPath]; known {
+ continue
+ }
+
+ a.AllPackages[v.PkgPath] = v
+ if err := a.processPackage(v); err != nil {
+ return err
+ }
+ if err := a.walkImports(v); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (a *typeIndex) detectNodes(file *ast.File) (node, error) {
+ var n node
+ for _, comments := range file.Comments {
+ var seenStruct string
+ for _, cline := range comments.List {
+ if cline == nil {
+ continue
+ }
+ }
+
+ for _, cline := range comments.List {
+ if cline == nil {
+ continue
+ }
+
+ matches := rxSwaggerAnnotation.FindStringSubmatch(cline.Text)
+ if len(matches) < 2 {
+ continue
+ }
+
+ switch matches[1] {
+ case "route":
+ n |= routeNode
+ case "operation":
+ n |= operationNode
+ case "model":
+ n |= modelNode
+ if seenStruct == "" || seenStruct == matches[1] {
+ seenStruct = matches[1]
+ } else {
+ return 0, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text)
+ }
+ case "meta":
+ n |= metaNode
+ case "parameters":
+ n |= parametersNode
+ if seenStruct == "" || seenStruct == matches[1] {
+ seenStruct = matches[1]
+ } else {
+ return 0, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text)
+ }
+ case "response":
+ n |= responseNode
+ if seenStruct == "" || seenStruct == matches[1] {
+ seenStruct = matches[1]
+ } else {
+ return 0, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text)
+ }
+ case "strfmt", "name", "discriminated", "file", "enum", "default", "alias", "type":
+ // TODO: perhaps collect these and pass along to avoid lookups later on
+ case "allOf":
+ case "ignore":
+ default:
+ return 0, fmt.Errorf("classifier: unknown swagger annotation %q", matches[1])
+ }
+ }
+ }
+ return n, nil
+}
+
+func debugLog(format string, args ...interface{}) {
+ if Debug {
+ log.Printf(format, args...)
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/doc.go b/vendor/github.com/go-swagger/go-swagger/codescan/doc.go
new file mode 100644
index 000000000..3d4c3539c
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/doc.go
@@ -0,0 +1,6 @@
/*
Package codescan provides a scanner for go files that produces a swagger spec document.

This package is intended for go1.11 onwards, and does support go modules.
*/
package codescan
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/enum.go b/vendor/github.com/go-swagger/go-swagger/codescan/enum.go
new file mode 100644
index 000000000..bc1bb52e7
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/enum.go
@@ -0,0 +1,32 @@
+package codescan
+
import (
	"go/ast"
	"go/token"
	"strconv"
	"strings"

	"github.com/go-openapi/spec"
)
+
+func getEnumBasicLitValue(basicLit *ast.BasicLit) interface{} {
+ switch basicLit.Kind.String() {
+ case "INT":
+ if result, err := strconv.ParseInt(basicLit.Value, 10, 64); err == nil {
+ return result
+ }
+ case "FLOAT":
+ if result, err := strconv.ParseFloat(basicLit.Value, 64); err == nil {
+ return result
+ }
+ default:
+ return strings.Trim(basicLit.Value, "\"")
+ }
+ return nil
+}
+
+const extEnumDesc = "x-go-enum-desc"
+
+func getEnumDesc(extensions spec.Extensions) (desc string) {
+ desc, _ = extensions.GetString(extEnumDesc)
+ return
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/meta.go b/vendor/github.com/go-swagger/go-swagger/codescan/meta.go
new file mode 100644
index 000000000..20dbb7cb8
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/meta.go
@@ -0,0 +1,252 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package codescan
+
+import (
+ "encoding/json"
+ "fmt"
+ "go/ast"
+ "net/mail"
+ "regexp"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+type metaSection struct {
+ Comments *ast.CommentGroup
+}
+
+func metaTOSSetter(meta *spec.Info) func([]string) {
+ return func(lines []string) {
+ meta.TermsOfService = joinDropLast(lines)
+ }
+}
+
+func metaConsumesSetter(meta *spec.Swagger) func([]string) {
+ return func(consumes []string) { meta.Consumes = consumes }
+}
+
+func metaProducesSetter(meta *spec.Swagger) func([]string) {
+ return func(produces []string) { meta.Produces = produces }
+}
+
+func metaSchemeSetter(meta *spec.Swagger) func([]string) {
+ return func(schemes []string) { meta.Schemes = schemes }
+}
+
+func metaSecuritySetter(meta *spec.Swagger) func([]map[string][]string) {
+ return func(secDefs []map[string][]string) { meta.Security = secDefs }
+}
+
+func metaSecurityDefinitionsSetter(meta *spec.Swagger) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.SecurityDefinitions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ meta.SecurityDefinitions = jsonData
+ return nil
+ }
+}
+
+func metaVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.Extensions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ for k := range jsonData {
+ if !rxAllowedExtensions.MatchString(k) {
+ return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
+ }
+ }
+ meta.Extensions = jsonData
+ return nil
+ }
+}
+
+func infoVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.Extensions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ for k := range jsonData {
+ if !rxAllowedExtensions.MatchString(k) {
+ return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
+ }
+ }
+ meta.Info.Extensions = jsonData
+ return nil
+ }
+}
+
+func newMetaParser(swspec *spec.Swagger) *sectionedParser {
+ sp := new(sectionedParser)
+ if swspec.Info == nil {
+ swspec.Info = new(spec.Info)
+ }
+ info := swspec.Info
+ sp.setTitle = func(lines []string) {
+ tosave := joinDropLast(lines)
+ if len(tosave) > 0 {
+ tosave = rxStripTitleComments.ReplaceAllString(tosave, "")
+ }
+ info.Title = tosave
+ }
+ sp.setDescription = func(lines []string) { info.Description = joinDropLast(lines) }
+ sp.taggers = []tagParser{
+ newMultiLineTagParser("TOS", newMultilineDropEmptyParser(rxTOS, metaTOSSetter(info)), false),
+ newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, metaConsumesSetter(swspec)), false),
+ newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, metaProducesSetter(swspec)), false),
+ newSingleLineTagParser("Schemes", newSetSchemes(metaSchemeSetter(swspec))),
+ newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, metaSecuritySetter(swspec)), false),
+ newMultiLineTagParser("SecurityDefinitions", newYamlParser(rxSecurity, metaSecurityDefinitionsSetter(swspec)), true),
+ newSingleLineTagParser("Version", &setMetaSingle{swspec, rxVersion, setInfoVersion}),
+ newSingleLineTagParser("Host", &setMetaSingle{swspec, rxHost, setSwaggerHost}),
+ newSingleLineTagParser("BasePath", &setMetaSingle{swspec, rxBasePath, setSwaggerBasePath}),
+ newSingleLineTagParser("Contact", &setMetaSingle{swspec, rxContact, setInfoContact}),
+ newSingleLineTagParser("License", &setMetaSingle{swspec, rxLicense, setInfoLicense}),
+ newMultiLineTagParser("YAMLInfoExtensionsBlock", newYamlParser(rxInfoExtensions, infoVendorExtensibleSetter(swspec)), true),
+ newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, metaVendorExtensibleSetter(swspec)), true),
+ }
+ return sp
+}
+
+type setMetaSingle struct {
+ spec *spec.Swagger
+ rx *regexp.Regexp
+ set func(spec *spec.Swagger, lines []string) error
+}
+
+func (s *setMetaSingle) Matches(line string) bool {
+ return s.rx.MatchString(line)
+}
+
+func (s *setMetaSingle) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := s.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ return s.set(s.spec, []string{matches[1]})
+ }
+ return nil
+}
+
+func setSwaggerHost(swspec *spec.Swagger, lines []string) error {
+ lns := lines
+ if len(lns) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ lns = []string{"localhost"}
+ }
+ swspec.Host = lns[0]
+ return nil
+}
+
+func setSwaggerBasePath(swspec *spec.Swagger, lines []string) error {
+ var ln string
+ if len(lines) > 0 {
+ ln = lines[0]
+ }
+ swspec.BasePath = ln
+ return nil
+}
+
+func setInfoVersion(swspec *spec.Swagger, lines []string) error {
+ if len(lines) == 0 {
+ return nil
+ }
+ info := safeInfo(swspec)
+ info.Version = strings.TrimSpace(lines[0])
+ return nil
+}
+
+func setInfoContact(swspec *spec.Swagger, lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ contact, err := parseContactInfo(lines[0])
+ if err != nil {
+ return err
+ }
+ info := safeInfo(swspec)
+ info.Contact = contact
+ return nil
+}
+
+func parseContactInfo(line string) (*spec.ContactInfo, error) {
+ nameEmail, url := splitURL(line)
+ var name, email string
+ if len(nameEmail) > 0 {
+ addr, err := mail.ParseAddress(nameEmail)
+ if err != nil {
+ return nil, err
+ }
+ name, email = addr.Name, addr.Address
+ }
+ return &spec.ContactInfo{
+ ContactInfoProps: spec.ContactInfoProps{
+ URL: url,
+ Name: name,
+ Email: email,
+ },
+ }, nil
+}
+
+func setInfoLicense(swspec *spec.Swagger, lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ info := safeInfo(swspec)
+ line := lines[0]
+ name, url := splitURL(line)
+ info.License = &spec.License{
+ LicenseProps: spec.LicenseProps{
+ Name: name,
+ URL: url,
+ },
+ }
+ return nil
+}
+
+func safeInfo(swspec *spec.Swagger) *spec.Info {
+ if swspec.Info == nil {
+ swspec.Info = new(spec.Info)
+ }
+ return swspec.Info
+}
+
+// httpFTPScheme matches the http://, https://, ftp://, ftps://, ws:// and wss:// URL schemes
+var httpFTPScheme = regexp.MustCompile("(?:(?:ht|f)tp|ws)s?://")
+
+func splitURL(line string) (notURL, url string) {
+ str := strings.TrimSpace(line)
+ parts := httpFTPScheme.FindStringIndex(str)
+ if len(parts) == 0 {
+ if len(str) > 0 {
+ notURL = str
+ }
+ return
+ }
+ if len(parts) > 0 {
+ notURL = strings.TrimSpace(str[:parts[0]])
+ url = strings.TrimSpace(str[parts[0]:])
+ }
+ return
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/operations.go b/vendor/github.com/go-swagger/go-swagger/codescan/operations.go
new file mode 100644
index 000000000..c6a194526
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/operations.go
@@ -0,0 +1,170 @@
+package codescan
+
+import (
+ "fmt"
+ "go/ast"
+ "regexp"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+type operationsBuilder struct {
+ ctx *scanCtx
+ path parsedPathContent
+ operations map[string]*spec.Operation
+}
+
+func (o *operationsBuilder) Build(tgt *spec.Paths) error {
+ pthObj := tgt.Paths[o.path.Path]
+
+ op := setPathOperation(
+ o.path.Method, o.path.ID,
+ &pthObj, o.operations[o.path.ID])
+
+ op.Tags = o.path.Tags
+
+ sp := new(yamlSpecScanner)
+ sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
+ sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
+
+ if err := sp.Parse(o.path.Remaining); err != nil {
+ return fmt.Errorf("operation (%s): %v", op.ID, err)
+ }
+ if err := sp.UnmarshalSpec(op.UnmarshalJSON); err != nil {
+ return fmt.Errorf("operation (%s): %v", op.ID, err)
+ }
+
+ if tgt.Paths == nil {
+ tgt.Paths = make(map[string]spec.PathItem)
+ }
+
+ tgt.Paths[o.path.Path] = pthObj
+ return nil
+}
+
+type parsedPathContent struct {
+ Method, Path, ID string
+ Tags []string
+ Remaining *ast.CommentGroup
+}
+
+func parsePathAnnotation(annotation *regexp.Regexp, lines []*ast.Comment) (cnt parsedPathContent) {
+ var justMatched bool
+
+ for _, cmt := range lines {
+ txt := cmt.Text
+ for _, line := range strings.Split(txt, "\n") {
+ matches := annotation.FindStringSubmatch(line)
+ if len(matches) > 3 {
+ cnt.Method, cnt.Path, cnt.ID = matches[1], matches[2], matches[len(matches)-1]
+ cnt.Tags = rxSpace.Split(matches[3], -1)
+ if len(matches[3]) == 0 {
+ cnt.Tags = nil
+ }
+ justMatched = true
+ } else if cnt.Method != "" {
+ if cnt.Remaining == nil {
+ cnt.Remaining = new(ast.CommentGroup)
+ }
+ if !justMatched || strings.TrimSpace(rxStripComments.ReplaceAllString(line, "")) != "" {
+ cc := new(ast.Comment)
+ cc.Slash = cmt.Slash
+ cc.Text = line
+ cnt.Remaining.List = append(cnt.Remaining.List, cc)
+ justMatched = false
+ }
+ }
+ }
+ }
+
+ return
+}
+
+func setPathOperation(method, id string, pthObj *spec.PathItem, op *spec.Operation) *spec.Operation {
+ if op == nil {
+ op = new(spec.Operation)
+ op.ID = id
+ }
+
+ switch strings.ToUpper(method) {
+ case "GET":
+ if pthObj.Get != nil {
+ if id == pthObj.Get.ID {
+ op = pthObj.Get
+ } else {
+ pthObj.Get = op
+ }
+ } else {
+ pthObj.Get = op
+ }
+
+ case "POST":
+ if pthObj.Post != nil {
+ if id == pthObj.Post.ID {
+ op = pthObj.Post
+ } else {
+ pthObj.Post = op
+ }
+ } else {
+ pthObj.Post = op
+ }
+
+ case "PUT":
+ if pthObj.Put != nil {
+ if id == pthObj.Put.ID {
+ op = pthObj.Put
+ } else {
+ pthObj.Put = op
+ }
+ } else {
+ pthObj.Put = op
+ }
+
+ case "PATCH":
+ if pthObj.Patch != nil {
+ if id == pthObj.Patch.ID {
+ op = pthObj.Patch
+ } else {
+ pthObj.Patch = op
+ }
+ } else {
+ pthObj.Patch = op
+ }
+
+ case "HEAD":
+ if pthObj.Head != nil {
+ if id == pthObj.Head.ID {
+ op = pthObj.Head
+ } else {
+ pthObj.Head = op
+ }
+ } else {
+ pthObj.Head = op
+ }
+
+ case "DELETE":
+ if pthObj.Delete != nil {
+ if id == pthObj.Delete.ID {
+ op = pthObj.Delete
+ } else {
+ pthObj.Delete = op
+ }
+ } else {
+ pthObj.Delete = op
+ }
+
+ case "OPTIONS":
+ if pthObj.Options != nil {
+ if id == pthObj.Options.ID {
+ op = pthObj.Options
+ } else {
+ pthObj.Options = op
+ }
+ } else {
+ pthObj.Options = op
+ }
+ }
+
+ return op
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/parameters.go b/vendor/github.com/go-swagger/go-swagger/codescan/parameters.go
new file mode 100644
index 000000000..b00916825
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/parameters.go
@@ -0,0 +1,518 @@
+package codescan
+
+import (
+ "fmt"
+ "go/ast"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/go/ast/astutil"
+
+ "github.com/pkg/errors"
+
+ "github.com/go-openapi/spec"
+)
+
+type paramTypable struct {
+ param *spec.Parameter
+}
+
+func (pt paramTypable) Level() int { return 0 }
+
+func (pt paramTypable) Typed(tpe, format string) {
+ pt.param.Typed(tpe, format)
+}
+
+func (pt paramTypable) SetRef(ref spec.Ref) {
+ pt.param.Ref = ref
+}
+
+func (pt paramTypable) Items() swaggerTypable {
+ bdt, schema := bodyTypable(pt.param.In, pt.param.Schema)
+ if bdt != nil {
+ pt.param.Schema = schema
+ return bdt
+ }
+
+ if pt.param.Items == nil {
+ pt.param.Items = new(spec.Items)
+ }
+ pt.param.Type = "array"
+ return itemsTypable{pt.param.Items, 1}
+}
+
+func (pt paramTypable) Schema() *spec.Schema {
+ if pt.param.In != "body" {
+ return nil
+ }
+ if pt.param.Schema == nil {
+ pt.param.Schema = new(spec.Schema)
+ }
+ return pt.param.Schema
+}
+
+func (pt paramTypable) AddExtension(key string, value interface{}) {
+ if pt.param.In == "body" {
+ pt.Schema().AddExtension(key, value)
+ } else {
+ pt.param.AddExtension(key, value)
+ }
+}
+
+func (pt paramTypable) WithEnum(values ...interface{}) {
+ pt.param.WithEnum(values...)
+}
+
+func (pt paramTypable) WithEnumDescription(desc string) {
+ if desc == "" {
+ return
+ }
+ pt.param.AddExtension(extEnumDesc, desc)
+}
+
+type itemsTypable struct {
+ items *spec.Items
+ level int
+}
+
+func (pt itemsTypable) Level() int { return pt.level }
+
+func (pt itemsTypable) Typed(tpe, format string) {
+ pt.items.Typed(tpe, format)
+}
+
+func (pt itemsTypable) SetRef(ref spec.Ref) {
+ pt.items.Ref = ref
+}
+
+func (pt itemsTypable) Schema() *spec.Schema {
+ return nil
+}
+
+func (pt itemsTypable) Items() swaggerTypable {
+ if pt.items.Items == nil {
+ pt.items.Items = new(spec.Items)
+ }
+ pt.items.Type = "array"
+ return itemsTypable{pt.items.Items, pt.level + 1}
+}
+
+func (pt itemsTypable) AddExtension(key string, value interface{}) {
+ pt.items.AddExtension(key, value)
+}
+
+func (pt itemsTypable) WithEnum(values ...interface{}) {
+ pt.items.WithEnum(values...)
+}
+
+func (pt itemsTypable) WithEnumDescription(_ string) {
+	// enum descriptions are not supported on items
+}
+
+type paramValidations struct {
+ current *spec.Parameter
+}
+
+func (sv paramValidations) SetMaximum(val float64, exclusive bool) {
+ sv.current.Maximum = &val
+ sv.current.ExclusiveMaximum = exclusive
+}
+func (sv paramValidations) SetMinimum(val float64, exclusive bool) {
+ sv.current.Minimum = &val
+ sv.current.ExclusiveMinimum = exclusive
+}
+func (sv paramValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
+func (sv paramValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
+func (sv paramValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
+func (sv paramValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
+func (sv paramValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
+func (sv paramValidations) SetPattern(val string) { sv.current.Pattern = val }
+func (sv paramValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
+func (sv paramValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
+func (sv paramValidations) SetEnum(val string) {
+ sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
+}
+func (sv paramValidations) SetDefault(val interface{}) { sv.current.Default = val }
+func (sv paramValidations) SetExample(val interface{}) { sv.current.Example = val }
+
+type itemsValidations struct {
+ current *spec.Items
+}
+
+func (sv itemsValidations) SetMaximum(val float64, exclusive bool) {
+ sv.current.Maximum = &val
+ sv.current.ExclusiveMaximum = exclusive
+}
+func (sv itemsValidations) SetMinimum(val float64, exclusive bool) {
+ sv.current.Minimum = &val
+ sv.current.ExclusiveMinimum = exclusive
+}
+func (sv itemsValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
+func (sv itemsValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
+func (sv itemsValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
+func (sv itemsValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
+func (sv itemsValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
+func (sv itemsValidations) SetPattern(val string) { sv.current.Pattern = val }
+func (sv itemsValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
+func (sv itemsValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
+func (sv itemsValidations) SetEnum(val string) {
+ sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
+}
+func (sv itemsValidations) SetDefault(val interface{}) { sv.current.Default = val }
+func (sv itemsValidations) SetExample(val interface{}) { sv.current.Example = val }
+
+type parameterBuilder struct {
+ ctx *scanCtx
+ decl *entityDecl
+ postDecls []*entityDecl
+}
+
+func (p *parameterBuilder) Build(operations map[string]*spec.Operation) error {
+
+ // check if there is a swagger:parameters tag that is followed by one or more words,
+ // these words are the ids of the operations this parameter struct applies to
+ // once type name is found convert it to a schema, by looking up the schema in the
+ // parameters dictionary that got passed into this parse method
+ for _, opid := range p.decl.OperationIDS() {
+ operation, ok := operations[opid]
+ if !ok {
+ operation = new(spec.Operation)
+ operations[opid] = operation
+ operation.ID = opid
+ }
+ debugLog("building parameters for: %s", opid)
+
+ // analyze struct body for fields etc
+ // each exported struct field:
+ // * gets a type mapped to a go primitive
+ // * perhaps gets a format
+ // * has to document the validations that apply for the type and the field
+ // * when the struct field points to a model it becomes a ref: #/definitions/ModelName
+ // * comments that aren't tags is used as the description
+ if err := p.buildFromType(p.decl.Type, operation, make(map[string]spec.Parameter)); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (p *parameterBuilder) buildFromType(otpe types.Type, op *spec.Operation, seen map[string]spec.Parameter) error {
+ switch tpe := otpe.(type) {
+ case *types.Pointer:
+ return p.buildFromType(tpe.Elem(), op, seen)
+ case *types.Named:
+ o := tpe.Obj()
+ switch stpe := o.Type().Underlying().(type) {
+ case *types.Struct:
+ debugLog("build from type %s: %T", tpe.Obj().Name(), otpe)
+ if decl, found := p.ctx.DeclForType(o.Type()); found {
+ return p.buildFromStruct(decl, stpe, op, seen)
+ }
+ return p.buildFromStruct(p.decl, stpe, op, seen)
+ default:
+ return errors.Errorf("unhandled type (%T): %s", stpe, o.Type().Underlying().String())
+ }
+ default:
+ return errors.Errorf("unhandled type (%T): %s", otpe, tpe.String())
+ }
+}
+
+func (p *parameterBuilder) buildFromField(fld *types.Var, tpe types.Type, typable swaggerTypable, seen map[string]spec.Parameter) error {
+ debugLog("build from field %s: %T", fld.Name(), tpe)
+ switch ftpe := tpe.(type) {
+ case *types.Basic:
+ return swaggerSchemaForType(ftpe.Name(), typable)
+ case *types.Struct:
+ sb := schemaBuilder{
+ decl: p.decl,
+ ctx: p.ctx,
+ }
+ if err := sb.buildFromType(tpe, typable); err != nil {
+ return err
+ }
+ p.postDecls = append(p.postDecls, sb.postDecls...)
+ return nil
+ case *types.Pointer:
+ return p.buildFromField(fld, ftpe.Elem(), typable, seen)
+ case *types.Interface:
+ sb := schemaBuilder{
+ decl: p.decl,
+ ctx: p.ctx,
+ }
+ if err := sb.buildFromType(tpe, typable); err != nil {
+ return err
+ }
+ p.postDecls = append(p.postDecls, sb.postDecls...)
+ return nil
+ case *types.Array:
+ return p.buildFromField(fld, ftpe.Elem(), typable.Items(), seen)
+ case *types.Slice:
+ return p.buildFromField(fld, ftpe.Elem(), typable.Items(), seen)
+ case *types.Map:
+ schema := new(spec.Schema)
+ typable.Schema().Typed("object", "").AdditionalProperties = &spec.SchemaOrBool{
+ Schema: schema,
+ }
+ sb := schemaBuilder{
+ decl: p.decl,
+ ctx: p.ctx,
+ }
+ if err := sb.buildFromType(ftpe.Elem(), schemaTypable{schema, typable.Level() + 1}); err != nil {
+ return err
+ }
+ return nil
+ case *types.Named:
+ if decl, found := p.ctx.DeclForType(ftpe.Obj().Type()); found {
+ if decl.Type.Obj().Pkg().Path() == "time" && decl.Type.Obj().Name() == "Time" {
+ typable.Typed("string", "date-time")
+ return nil
+ }
+ if sfnm, isf := strfmtName(decl.Comments); isf {
+ typable.Typed("string", sfnm)
+ return nil
+ }
+ sb := &schemaBuilder{ctx: p.ctx, decl: decl}
+ sb.inferNames()
+ if err := sb.buildFromType(decl.Type, typable); err != nil {
+ return err
+ }
+ p.postDecls = append(p.postDecls, sb.postDecls...)
+ return nil
+ }
+ return errors.Errorf("unable to find package and source file for: %s", ftpe.String())
+ default:
+ return errors.Errorf("unknown type for %s: %T", fld.String(), fld.Type())
+ }
+}
+
+func spExtensionsSetter(ps *spec.Parameter) func(*spec.Extensions) {
+ return func(exts *spec.Extensions) {
+ for name, value := range *exts {
+ addExtension(&ps.VendorExtensible, name, value)
+ }
+ }
+}
+
+func (p *parameterBuilder) buildFromStruct(decl *entityDecl, tpe *types.Struct, op *spec.Operation, seen map[string]spec.Parameter) error {
+ if tpe.NumFields() == 0 {
+ return nil
+ }
+
+ var sequence []string
+
+ for i := 0; i < tpe.NumFields(); i++ {
+ fld := tpe.Field(i)
+
+ if fld.Embedded() {
+ if err := p.buildFromType(fld.Type(), op, seen); err != nil {
+ return err
+ }
+ continue
+ }
+
+ if !fld.Exported() {
+ debugLog("skipping field %s because it's not exported", fld.Name())
+ continue
+ }
+
+ tg := tpe.Tag(i)
+
+ var afld *ast.Field
+ ans, _ := astutil.PathEnclosingInterval(decl.File, fld.Pos(), fld.Pos())
+ for _, an := range ans {
+ at, valid := an.(*ast.Field)
+ if !valid {
+ continue
+ }
+
+ debugLog("field %s: %s(%T) [%q] ==> %s", fld.Name(), fld.Type().String(), fld.Type(), tg, at.Doc.Text())
+ afld = at
+ break
+ }
+
+ if afld == nil {
+ debugLog("can't find source associated with %s for %s", fld.String(), tpe.String())
+ continue
+ }
+
+ // if the field is annotated with swagger:ignore, ignore it
+ if ignored(afld.Doc) {
+ continue
+ }
+
+ name, ignore, _, err := parseJSONTag(afld)
+ if err != nil {
+ return err
+ }
+ if ignore {
+ continue
+ }
+
+ in := "query"
+ // scan for param location first, this changes some behavior down the line
+ if afld.Doc != nil {
+ for _, cmt := range afld.Doc.List {
+ for _, line := range strings.Split(cmt.Text, "\n") {
+ matches := rxIn.FindStringSubmatch(line)
+ if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
+ in = strings.TrimSpace(matches[1])
+ }
+ }
+ }
+ }
+
+ ps := seen[name]
+ ps.In = in
+ var pty swaggerTypable = paramTypable{&ps}
+ if in == "body" {
+ pty = schemaTypable{pty.Schema(), 0}
+ }
+ if in == "formData" && afld.Doc != nil && fileParam(afld.Doc) {
+ pty.Typed("file", "")
+ } else if err := p.buildFromField(fld, fld.Type(), pty, seen); err != nil {
+ return err
+ }
+
+ if strfmtName, ok := strfmtName(afld.Doc); ok {
+ ps.Typed("string", strfmtName)
+ ps.Ref = spec.Ref{}
+ ps.Items = nil
+ }
+
+ sp := new(sectionedParser)
+ sp.setDescription = func(lines []string) {
+ ps.Description = joinDropLast(lines)
+ enumDesc := getEnumDesc(ps.Extensions)
+ if enumDesc != "" {
+ ps.Description += "\n" + enumDesc
+ }
+ }
+ if ps.Ref.String() == "" {
+ sp.taggers = []tagParser{
+ newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
+ newSingleLineTagParser("maximum", &setMaximum{paramValidations{&ps}, rxf(rxMaximumFmt, "")}),
+ newSingleLineTagParser("minimum", &setMinimum{paramValidations{&ps}, rxf(rxMinimumFmt, "")}),
+ newSingleLineTagParser("multipleOf", &setMultipleOf{paramValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
+ newSingleLineTagParser("minLength", &setMinLength{paramValidations{&ps}, rxf(rxMinLengthFmt, "")}),
+ newSingleLineTagParser("maxLength", &setMaxLength{paramValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
+ newSingleLineTagParser("pattern", &setPattern{paramValidations{&ps}, rxf(rxPatternFmt, "")}),
+ newSingleLineTagParser("collectionFormat", &setCollectionFormat{paramValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
+ newSingleLineTagParser("minItems", &setMinItems{paramValidations{&ps}, rxf(rxMinItemsFmt, "")}),
+ newSingleLineTagParser("maxItems", &setMaxItems{paramValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
+ newSingleLineTagParser("unique", &setUnique{paramValidations{&ps}, rxf(rxUniqueFmt, "")}),
+ newSingleLineTagParser("enum", &setEnum{paramValidations{&ps}, rxf(rxEnumFmt, "")}),
+ newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxDefaultFmt, "")}),
+ newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxExampleFmt, "")}),
+ newSingleLineTagParser("required", &setRequiredParam{&ps}),
+ newMultiLineTagParser("Extensions", newSetExtensions(spExtensionsSetter(&ps)), true),
+ }
+
+ itemsTaggers := func(items *spec.Items, level int) []tagParser {
+ // the expression is 1-index based not 0-index
+ itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
+
+ return []tagParser{
+ newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
+ }
+ }
+
+ var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
+ parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
+ if items == nil {
+ return []tagParser{}, nil
+ }
+ switch iftpe := expr.(type) {
+ case *ast.ArrayType:
+ eleTaggers := itemsTaggers(items, level)
+ sp.taggers = append(eleTaggers, sp.taggers...)
+ otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ case *ast.SelectorExpr:
+ otherTaggers, err := parseArrayTypes(iftpe.Sel, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ case *ast.Ident:
+ taggers := []tagParser{}
+ if iftpe.Obj == nil {
+ taggers = itemsTaggers(items, level)
+ }
+ otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return append(taggers, otherTaggers...), nil
+ case *ast.StarExpr:
+ otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ default:
+ return nil, fmt.Errorf("unknown field type ele for %q", name)
+ }
+ }
+
+ // check if this is a primitive, if so parse the validations from the
+ // doc comments of the slice declaration.
+ if ftped, ok := afld.Type.(*ast.ArrayType); ok {
+ taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
+ if err != nil {
+ return err
+ }
+ sp.taggers = append(taggers, sp.taggers...)
+ }
+
+ } else {
+ sp.taggers = []tagParser{
+ newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
+ newSingleLineTagParser("required", &matchOnlyParam{&ps, rxRequired}),
+ newMultiLineTagParser("Extensions", newSetExtensions(spExtensionsSetter(&ps)), true),
+ }
+ }
+ if err := sp.Parse(afld.Doc); err != nil {
+ return err
+ }
+ if ps.In == "path" {
+ ps.Required = true
+ }
+
+ if ps.Name == "" {
+ ps.Name = name
+ }
+
+ if name != fld.Name() {
+ addExtension(&ps.VendorExtensible, "x-go-name", fld.Name())
+ }
+ seen[name] = ps
+ sequence = append(sequence, name)
+ }
+
+ for _, k := range sequence {
+ p := seen[k]
+ for i, v := range op.Parameters {
+ if v.Name == k {
+ op.Parameters = append(op.Parameters[:i], op.Parameters[i+1:]...)
+ break
+ }
+ }
+ op.Parameters = append(op.Parameters, p)
+ }
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/parser.go b/vendor/github.com/go-swagger/go-swagger/codescan/parser.go
new file mode 100644
index 000000000..9637e6c22
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/parser.go
@@ -0,0 +1,1667 @@
+package codescan
+
+import (
+ "encoding/json"
+ "fmt"
+ "go/ast"
+ "reflect"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/loads/fmts"
+ "github.com/go-openapi/spec"
+ "github.com/pkg/errors"
+ "gopkg.in/yaml.v3"
+)
+
+// shouldAcceptTag reports whether an operation carrying the given tags
+// passes the include/exclude tag filters. A non-empty include list takes
+// precedence: only a tag present in includeTags is accepted.
+func shouldAcceptTag(tags []string, includeTags map[string]bool, excludeTags map[string]bool) bool {
+	for _, tag := range tags {
+		if len(includeTags) > 0 {
+			if includeTags[tag] {
+				return true
+			}
+		} else if len(excludeTags) > 0 {
+			if excludeTags[tag] {
+				return false
+			}
+		}
+	}
+	// no decisive tag seen: accept only when there is no include filter.
+	return len(includeTags) == 0
+}
+
+// shouldAcceptPkg reports whether a package import path passes the
+// include/exclude package filters. Filter entries are interpreted as
+// regular expressions; match errors are silently treated as "no match".
+func shouldAcceptPkg(path string, includePkgs, excludePkgs []string) bool {
+	if len(includePkgs) == 0 && len(excludePkgs) == 0 {
+		return true
+	}
+	for _, pkgName := range includePkgs {
+		matched, _ := regexp.MatchString(pkgName, path)
+		if matched {
+			return true
+		}
+	}
+	for _, pkgName := range excludePkgs {
+		matched, _ := regexp.MatchString(pkgName, path)
+		if matched {
+			return false
+		}
+	}
+	// matched by no pattern: accept only when there is no include list.
+	return len(includePkgs) == 0
+}
+
+// Many thanks go to https://github.com/yvasiyarov/swagger
+// this is loosely based on that implementation but for swagger 2.0
+
+// joinDropLast joins lines with newlines, dropping a single trailing
+// blank (whitespace-only) line if present.
+func joinDropLast(lines []string) string {
+	l := len(lines)
+	lns := lines
+	if l > 0 && len(strings.TrimSpace(lines[l-1])) == 0 {
+		lns = lines[:l-1]
+	}
+	return strings.Join(lns, "\n")
+}
+
+// removeEmptyLines returns lines with empty and whitespace-only entries removed.
+func removeEmptyLines(lines []string) (notEmpty []string) {
+	for _, l := range lines {
+		if len(strings.TrimSpace(l)) > 0 {
+			notEmpty = append(notEmpty, l)
+		}
+	}
+	return
+}
+
+// rxf builds a regexp from the pattern template rxp with ar substituted in.
+// It panics (via MustCompile) if the resulting pattern is invalid.
+func rxf(rxp, ar string) *regexp.Regexp {
+	return regexp.MustCompile(fmt.Sprintf(rxp, ar))
+}
+
+// allOfMember reports whether any comment line matches the rxAllOf annotation.
+func allOfMember(comments *ast.CommentGroup) bool {
+	if comments != nil {
+		for _, cmt := range comments.List {
+			for _, ln := range strings.Split(cmt.Text, "\n") {
+				if rxAllOf.MatchString(ln) {
+					return true
+				}
+			}
+		}
+	}
+	return false
+}
+
+// fileParam reports whether any comment line matches the rxFileUpload annotation.
+func fileParam(comments *ast.CommentGroup) bool {
+	if comments != nil {
+		for _, cmt := range comments.List {
+			for _, ln := range strings.Split(cmt.Text, "\n") {
+				if rxFileUpload.MatchString(ln) {
+					return true
+				}
+			}
+		}
+	}
+	return false
+}
+
+// strfmtName extracts the string-format name from the first comment line
+// matching rxStrFmt; the second return value reports whether one was found.
+func strfmtName(comments *ast.CommentGroup) (string, bool) {
+	if comments != nil {
+		for _, cmt := range comments.List {
+			for _, ln := range strings.Split(cmt.Text, "\n") {
+				matches := rxStrFmt.FindStringSubmatch(ln)
+				if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+					return strings.TrimSpace(matches[1]), true
+				}
+			}
+		}
+	}
+	return "", false
+}
+
+// ignored reports whether any comment line matches the rxIgnoreOverride annotation.
+func ignored(comments *ast.CommentGroup) bool {
+	if comments != nil {
+		for _, cmt := range comments.List {
+			for _, ln := range strings.Split(cmt.Text, "\n") {
+				if rxIgnoreOverride.MatchString(ln) {
+					return true
+				}
+			}
+		}
+	}
+	return false
+}
+
+// enumName extracts the enum name from the first comment line matching
+// rxEnum; the second return value reports whether one was found.
+func enumName(comments *ast.CommentGroup) (string, bool) {
+	if comments != nil {
+		for _, cmt := range comments.List {
+			for _, ln := range strings.Split(cmt.Text, "\n") {
+				matches := rxEnum.FindStringSubmatch(ln)
+				if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+					return strings.TrimSpace(matches[1]), true
+				}
+			}
+		}
+	}
+	return "", false
+}
+
+// aliasParam reports whether any comment line matches the rxAlias annotation.
+func aliasParam(comments *ast.CommentGroup) bool {
+	if comments != nil {
+		for _, cmt := range comments.List {
+			for _, ln := range strings.Split(cmt.Text, "\n") {
+				if rxAlias.MatchString(ln) {
+					return true
+				}
+			}
+		}
+	}
+	return false
+}
+
+// isAliasParam reports whether prop is a parameter located in the query,
+// path or formData section (i.e. a non-body, non-header parameter).
+func isAliasParam(prop swaggerTypable) bool {
+	var isParam bool
+	if param, ok := prop.(paramTypable); ok {
+		isParam = param.param.In == "query" ||
+			param.param.In == "path" ||
+			param.param.In == "formData"
+	}
+	return isParam
+}
+
+// defaultName extracts the default value text from the first comment line
+// matching rxDefault; the second return value reports whether one was found.
+func defaultName(comments *ast.CommentGroup) (string, bool) {
+	if comments != nil {
+		for _, cmt := range comments.List {
+			for _, ln := range strings.Split(cmt.Text, "\n") {
+				matches := rxDefault.FindStringSubmatch(ln)
+				if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+					return strings.TrimSpace(matches[1]), true
+				}
+			}
+		}
+	}
+	return "", false
+}
+
+// typeName extracts the overriding type name from the first comment line
+// matching rxType; the second return value reports whether one was found.
+func typeName(comments *ast.CommentGroup) (string, bool) {
+	var typ string
+	if comments != nil {
+		for _, cmt := range comments.List {
+			for _, ln := range strings.Split(cmt.Text, "\n") {
+				matches := rxType.FindStringSubmatch(ln)
+				if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+					typ = strings.TrimSpace(matches[1])
+					return typ, true
+				}
+			}
+		}
+	}
+	return "", false
+}
+
+// swaggerTypable abstracts the targets a scanned Go type can be written to:
+// a schema, a parameter, a header, or an items descriptor. Implementations
+// receive the resolved swagger type/format, refs, enums and extensions.
+type swaggerTypable interface {
+	Typed(string, string)
+	SetRef(spec.Ref)
+	Items() swaggerTypable
+	Schema() *spec.Schema
+	Level() int
+	AddExtension(key string, value interface{})
+	WithEnum(...interface{})
+	WithEnumDescription(desc string)
+}
+
+// Map all Go builtin types that have Json representation to Swagger/Json types.
+// See https://golang.org/pkg/builtin/ and http://swagger.io/specification/
+// Returns an error for builtins with no JSON representation (complex numbers)
+// and for any unrecognized type name.
+func swaggerSchemaForType(typeName string, prop swaggerTypable) error {
+	switch typeName {
+	case "bool":
+		prop.Typed("boolean", "")
+	case "byte":
+		prop.Typed("integer", "uint8")
+	case "complex128", "complex64":
+		return fmt.Errorf("unsupported builtin %q (no JSON marshaller)", typeName)
+	case "error":
+		// TODO: error is often marshalled into a string but not always (e.g. errors package creates
+		// errors that are marshalled into an empty object), this could be handled the same way
+		// custom JSON marshallers are handled (in future)
+		prop.Typed("string", "")
+	case "float32":
+		prop.Typed("number", "float")
+	case "float64":
+		prop.Typed("number", "double")
+	case "int":
+		// platform-sized int is conservatively mapped to int64
+		prop.Typed("integer", "int64")
+	case "int16":
+		prop.Typed("integer", "int16")
+	case "int32":
+		prop.Typed("integer", "int32")
+	case "int64":
+		prop.Typed("integer", "int64")
+	case "int8":
+		prop.Typed("integer", "int8")
+	case "rune":
+		prop.Typed("integer", "int32")
+	case "string":
+		prop.Typed("string", "")
+	case "uint":
+		prop.Typed("integer", "uint64")
+	case "uint16":
+		prop.Typed("integer", "uint16")
+	case "uint32":
+		prop.Typed("integer", "uint32")
+	case "uint64":
+		prop.Typed("integer", "uint64")
+	case "uint8":
+		prop.Typed("integer", "uint8")
+	case "uintptr":
+		prop.Typed("integer", "uint64")
+	case "object":
+		prop.Typed("object", "")
+	default:
+		return fmt.Errorf("unsupported type %q", typeName)
+	}
+	return nil
+}
+
+// newMultiLineTagParser builds a tagParser that consumes all subsequent
+// lines until the next tag; skipCleanUp leaves the raw lines untouched
+// (no comment-marker stripping) before parsing.
+func newMultiLineTagParser(name string, parser valueParser, skipCleanUp bool) tagParser {
+	return tagParser{
+		Name:        name,
+		MultiLine:   true,
+		SkipCleanUp: skipCleanUp,
+		Parser:      parser,
+	}
+}
+
+// newSingleLineTagParser builds a tagParser that consumes a single line.
+func newSingleLineTagParser(name string, parser valueParser) tagParser {
+	return tagParser{
+		Name:        name,
+		MultiLine:   false,
+		SkipCleanUp: false,
+		Parser:      parser,
+	}
+}
+
+// tagParser couples a named annotation with the valueParser that consumes
+// its line(s); Lines accumulates the matched input for a later Parse call.
+type tagParser struct {
+	Name        string
+	MultiLine   bool
+	SkipCleanUp bool
+	Lines       []string
+	Parser      valueParser
+}
+
+// Matches delegates the line match to the underlying valueParser.
+func (st *tagParser) Matches(line string) bool {
+	return st.Parser.Matches(line)
+}
+
+// Parse delegates the accumulated lines to the underlying valueParser.
+func (st *tagParser) Parse(lines []string) error {
+	return st.Parser.Parse(lines)
+}
+
+// newYamlParser builds a valueParser that converts a YAML comment block to
+// JSON and hands the result to setter.
+func newYamlParser(rx *regexp.Regexp, setter func(json.RawMessage) error) valueParser {
+	return &yamlParser{
+		set: setter,
+		rx:  rx,
+	}
+}
+
+// yamlParser parses a YAML fragment embedded in comments; rx decides which
+// lines belong to it and set receives the JSON-converted payload.
+type yamlParser struct {
+	set func(json.RawMessage) error
+	rx  *regexp.Regexp
+}
+
+// Parse de-indents the lines, unmarshals them as YAML, converts the value
+// to JSON and forwards it to the setter. Empty input is a no-op.
+func (y *yamlParser) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+
+	var uncommented []string
+	uncommented = append(uncommented, removeYamlIndent(lines)...)
+
+	yamlContent := strings.Join(uncommented, "\n")
+	var yamlValue interface{}
+	err := yaml.Unmarshal([]byte(yamlContent), &yamlValue)
+	if err != nil {
+		return err
+	}
+
+	var jsonValue json.RawMessage
+	jsonValue, err = fmts.YAMLToJSON(yamlValue)
+	if err != nil {
+		return err
+	}
+
+	return y.set(jsonValue)
+}
+
+// Matches reports whether line belongs to this YAML block.
+func (y *yamlParser) Matches(line string) bool {
+	return y.rx.MatchString(line)
+}
+
+// aggregates lines in header until it sees `---`,
+// the beginning of a YAML spec
+type yamlSpecScanner struct {
+	header         []string // free-form comment lines before the YAML spec
+	yamlSpec       []string // raw lines of the embedded YAML document
+	setTitle       func([]string)
+	setDescription func([]string)
+	workedOutTitle bool // guards collectTitleDescription from running twice
+	title          []string
+	skipHeader     bool // when set, header lines are discarded during Parse
+}
+
+// cleanupScannerLines strips the comment marker (ur) from each line and
+// trims leading/trailing blank lines. Lines between a pair of yamlBlock
+// delimiters are passed through de-indented instead of uncommented.
+// Returns nil when nothing but blank lines remain.
+func cleanupScannerLines(lines []string, ur *regexp.Regexp, yamlBlock *regexp.Regexp) []string {
+	// bail early when there is nothing to parse
+	if len(lines) == 0 {
+		return lines
+	}
+	seenLine := -1
+	var lastContent int
+	var uncommented []string
+	var startBlock bool
+	var yamlLines []string
+	for i, v := range lines {
+		if yamlBlock != nil && yamlBlock.MatchString(v) && !startBlock {
+			// opening delimiter of an embedded YAML block
+			startBlock = true
+			if seenLine < 0 {
+				seenLine = i
+			}
+			continue
+		}
+		if startBlock {
+			if yamlBlock != nil && yamlBlock.MatchString(v) {
+				// closing delimiter: flush the collected block, de-indented
+				startBlock = false
+				uncommented = append(uncommented, removeIndent(yamlLines)...)
+				continue
+			}
+			yamlLines = append(yamlLines, v)
+			if v != "" {
+				if seenLine < 0 {
+					seenLine = i
+				}
+				lastContent = i
+			}
+			continue
+		}
+		str := ur.ReplaceAllString(v, "")
+		uncommented = append(uncommented, str)
+		if str != "" {
+			if seenLine < 0 {
+				seenLine = i
+			}
+			lastContent = i
+		}
+	}
+
+	// fixes issue #50
+	if seenLine == -1 {
+		return nil
+	}
+	return uncommented[seenLine : lastContent+1]
+}
+
+// collectTitleDescription splits the accumulated header into title and
+// description exactly once; without a title setter it only cleans up the header.
+func (sp *yamlSpecScanner) collectTitleDescription() {
+	if sp.workedOutTitle {
+		return
+	}
+	if sp.setTitle == nil {
+		sp.header = cleanupScannerLines(sp.header, rxUncommentHeaders, nil)
+		return
+	}
+
+	sp.workedOutTitle = true
+	sp.title, sp.header = collectScannerTitleDescription(sp.header)
+}
+
+// Title returns the scanned title lines (computing them lazily).
+func (sp *yamlSpecScanner) Title() []string {
+	sp.collectTitleDescription()
+	return sp.title
+}
+
+// Description returns the scanned description lines (computing them lazily).
+func (sp *yamlSpecScanner) Description() []string {
+	sp.collectTitleDescription()
+	return sp.header
+}
+
+// Parse splits the comment group into header lines and an embedded YAML
+// spec. A `swagger:` annotation ends the scan; the first line matching
+// rxBeginYAMLSpec switches collection from header to yamlSpec.
+func (sp *yamlSpecScanner) Parse(doc *ast.CommentGroup) error {
+	if doc == nil {
+		return nil
+	}
+	var startedYAMLSpec bool
+COMMENTS:
+	for _, c := range doc.List {
+		for _, line := range strings.Split(c.Text, "\n") {
+			if rxSwaggerAnnotation.MatchString(line) {
+				break COMMENTS // a new swagger: annotation terminates this parser
+			}
+
+			if !startedYAMLSpec {
+				if rxBeginYAMLSpec.MatchString(line) {
+					startedYAMLSpec = true
+					sp.yamlSpec = append(sp.yamlSpec, line)
+					continue
+				}
+
+				if !sp.skipHeader {
+					sp.header = append(sp.header, line)
+				}
+
+				// no YAML spec yet, moving on
+				continue
+			}
+
+			sp.yamlSpec = append(sp.yamlSpec, line)
+		}
+	}
+	if sp.setTitle != nil {
+		sp.setTitle(sp.Title())
+	}
+	if sp.setDescription != nil {
+		sp.setDescription(sp.Description())
+	}
+	return nil
+}
+
+// UnmarshalSpec cleans the collected YAML lines, converts them to JSON and
+// feeds the bytes to u. The spec must start with a `---` document marker.
+// On success the buffered lines are discarded (the spec is consumed).
+func (sp *yamlSpecScanner) UnmarshalSpec(u func([]byte) error) (err error) {
+	specYaml := cleanupScannerLines(sp.yamlSpec, rxUncommentYAML, nil)
+	if len(specYaml) == 0 {
+		return errors.New("no spec available to unmarshal")
+	}
+
+	if !strings.Contains(specYaml[0], "---") {
+		return errors.New("yaml spec has to start with `---`")
+	}
+
+	// remove indentation
+	specYaml = removeIndent(specYaml)
+
+	// 1. parse yaml lines
+	yamlValue := make(map[interface{}]interface{})
+
+	yamlContent := strings.Join(specYaml, "\n")
+	err = yaml.Unmarshal([]byte(yamlContent), &yamlValue)
+	if err != nil {
+		return
+	}
+
+	// 2. convert to json
+	var jsonValue json.RawMessage
+	jsonValue, err = fmts.YAMLToJSON(yamlValue)
+	if err != nil {
+		return
+	}
+
+	// 3. unmarshal the json into an interface
+	var data []byte
+	data, err = jsonValue.MarshalJSON()
+	if err != nil {
+		return
+	}
+	err = u(data)
+	if err != nil {
+		return
+	}
+
+	// all parsed, returning...
+	sp.yamlSpec = nil // spec is now consumed, so let's erase the parsed lines
+	return
+}
+
+// removes indent base on the first line
+// NOTE(review): indexes spec[0] and the result of FindStringIndex without
+// nil/empty checks — assumes spec is non-empty and rxIndent always matches;
+// verify against callers.
+func removeIndent(spec []string) []string {
+	loc := rxIndent.FindStringIndex(spec[0])
+	if loc[1] == 0 {
+		return spec
+	}
+	for i := range spec {
+		if len(spec[i]) >= loc[1] {
+			spec[i] = spec[i][loc[1]-1:]
+			start := rxNotIndent.FindStringIndex(spec[i])
+			if start[1] == 0 {
+				continue
+			}
+
+			spec[i] = strings.Replace(spec[i], "\t", " ", start[1])
+		}
+	}
+	return spec
+}
+
+// removes indent base on the first line
+// Unlike removeIndent, lines shorter than the detected indent are dropped,
+// and a fully unindented first line yields nil.
+func removeYamlIndent(spec []string) []string {
+	loc := rxIndent.FindStringIndex(spec[0])
+	if loc[1] == 0 {
+		return nil
+	}
+	var s []string
+	for i := range spec {
+		if len(spec[i]) >= loc[1] {
+			s = append(s, spec[i][loc[1]-1:])
+		}
+	}
+	return s
+}
+
+// aggregates lines in header until it sees a tag.
+type sectionedParser struct {
+	header     []string            // free-form lines before the first tag
+	matched    map[string]tagParser // taggers that matched, keyed by name
+	annotation valueParser          // the swagger: annotation this parser owns
+
+	seenTag        bool
+	skipHeader     bool
+	setTitle       func([]string)
+	setDescription func([]string)
+	workedOutTitle bool
+	taggers        []tagParser // candidate tag parsers, tried in order
+	currentTagger  *tagParser  // tagger currently consuming lines (multi-line)
+	title          []string
+	ignored        bool // set when an explicit ignore annotation is seen
+}
+
+// collectTitleDescription splits the accumulated header into title and
+// description exactly once; without a title setter it only cleans up the header.
+func (st *sectionedParser) collectTitleDescription() {
+	if st.workedOutTitle {
+		return
+	}
+	if st.setTitle == nil {
+		st.header = cleanupScannerLines(st.header, rxUncommentHeaders, nil)
+		return
+	}
+
+	st.workedOutTitle = true
+	st.title, st.header = collectScannerTitleDescription(st.header)
+}
+
+// Title returns the parsed title lines (computing them lazily).
+func (st *sectionedParser) Title() []string {
+	st.collectTitleDescription()
+	return st.title
+}
+
+// Description returns the parsed description lines (computing them lazily).
+func (st *sectionedParser) Description() []string {
+	st.collectTitleDescription()
+	return st.header
+}
+
+// Parse walks the comment group line by line: header lines accumulate until
+// the first tag; each line matching a tagger is routed to that tagger's
+// buffer; a foreign swagger: annotation (or an explicit ignore) stops the
+// scan. Matched taggers are cleaned up and parsed at the end.
+func (st *sectionedParser) Parse(doc *ast.CommentGroup) error {
+	if doc == nil {
+		return nil
+	}
+COMMENTS:
+	for _, c := range doc.List {
+		for _, line := range strings.Split(c.Text, "\n") {
+			if rxSwaggerAnnotation.MatchString(line) {
+				if rxIgnoreOverride.MatchString(line) {
+					st.ignored = true
+					break COMMENTS // an explicit ignore terminates this parser
+				}
+				if st.annotation == nil || !st.annotation.Matches(line) {
+					break COMMENTS // a new swagger: annotation terminates this parser
+				}
+
+				_ = st.annotation.Parse([]string{line})
+				if len(st.header) > 0 {
+					st.seenTag = true
+				}
+				continue
+			}
+
+			var matched bool
+			for _, tg := range st.taggers {
+				// copy so currentTagger does not alias the loop variable
+				tagger := tg
+				if tagger.Matches(line) {
+					st.seenTag = true
+					st.currentTagger = &tagger
+					matched = true
+					break
+				}
+			}
+
+			if st.currentTagger == nil {
+				if !st.skipHeader && !st.seenTag {
+					st.header = append(st.header, line)
+				}
+				// didn't match a tag, moving on
+				continue
+			}
+
+			if st.currentTagger.MultiLine && matched {
+				// the first line of a multiline tagger doesn't count
+				continue
+			}
+
+			ts, ok := st.matched[st.currentTagger.Name]
+			if !ok {
+				ts = *st.currentTagger
+			}
+			ts.Lines = append(ts.Lines, line)
+			if st.matched == nil {
+				st.matched = make(map[string]tagParser)
+			}
+			st.matched[st.currentTagger.Name] = ts
+
+			if !st.currentTagger.MultiLine {
+				// single-line taggers are done after one line
+				st.currentTagger = nil
+			}
+		}
+	}
+	if st.setTitle != nil {
+		st.setTitle(st.Title())
+	}
+	if st.setDescription != nil {
+		st.setDescription(st.Description())
+	}
+	for _, mt := range st.matched {
+		if !mt.SkipCleanUp {
+			mt.Lines = cleanupScannerLines(mt.Lines, rxUncommentHeaders, nil)
+		}
+		if err := mt.Parse(mt.Lines); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// validationBuilder receives validation keywords parsed from annotations
+// and applies them to the underlying swagger construct.
+type validationBuilder interface {
+	SetMaximum(float64, bool)
+	SetMinimum(float64, bool)
+	SetMultipleOf(float64)
+
+	SetMinItems(int64)
+	SetMaxItems(int64)
+
+	SetMinLength(int64)
+	SetMaxLength(int64)
+	SetPattern(string)
+
+	SetUnique(bool)
+	SetEnum(string)
+	SetDefault(interface{})
+	SetExample(interface{})
+}
+
+// valueParser consumes annotation lines: Matches gates a line, Parse
+// processes the accumulated lines.
+type valueParser interface {
+	Parse([]string) error
+	Matches(string) bool
+}
+
+// operationValidationBuilder extends validationBuilder with the
+// collectionFormat keyword available on operation parameters.
+type operationValidationBuilder interface {
+	validationBuilder
+	SetCollectionFormat(string)
+}
+
+// setMaximum parses a maximum-value annotation; submatch 1 holds the
+// exclusivity marker ("<") and submatch 2 the numeric bound.
+type setMaximum struct {
+	builder validationBuilder
+	rx      *regexp.Regexp
+}
+
+// Parse reads the bound from the first line only; empty input is a no-op.
+func (sm *setMaximum) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := sm.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 2 && len(matches[2]) > 0 {
+		max, err := strconv.ParseFloat(matches[2], 64)
+		if err != nil {
+			return err
+		}
+		sm.builder.SetMaximum(max, matches[1] == "<")
+	}
+	return nil
+}
+
+func (sm *setMaximum) Matches(line string) bool {
+	return sm.rx.MatchString(line)
+}
+
+// setMinimum parses a minimum-value annotation; submatch 1 holds the
+// exclusivity marker (">") and submatch 2 the numeric bound.
+type setMinimum struct {
+	builder validationBuilder
+	rx      *regexp.Regexp
+}
+
+func (sm *setMinimum) Matches(line string) bool {
+	return sm.rx.MatchString(line)
+}
+
+// Parse reads the bound from the first line only; empty input is a no-op.
+func (sm *setMinimum) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := sm.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 2 && len(matches[2]) > 0 {
+		min, err := strconv.ParseFloat(matches[2], 64)
+		if err != nil {
+			return err
+		}
+		sm.builder.SetMinimum(min, matches[1] == ">")
+	}
+	return nil
+}
+
+// setMultipleOf parses a multipleOf annotation (numeric factor in submatch 1).
+type setMultipleOf struct {
+	builder validationBuilder
+	rx      *regexp.Regexp
+}
+
+func (sm *setMultipleOf) Matches(line string) bool {
+	return sm.rx.MatchString(line)
+}
+
+// Parse reads the factor from the first line only; empty input is a no-op.
+func (sm *setMultipleOf) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := sm.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 2 && len(matches[1]) > 0 {
+		multipleOf, err := strconv.ParseFloat(matches[1], 64)
+		if err != nil {
+			return err
+		}
+		sm.builder.SetMultipleOf(multipleOf)
+	}
+	return nil
+}
+
+// setMaxItems parses a maxItems annotation (integer count in submatch 1).
+type setMaxItems struct {
+	builder validationBuilder
+	rx      *regexp.Regexp
+}
+
+func (sm *setMaxItems) Matches(line string) bool {
+	return sm.rx.MatchString(line)
+}
+
+// Parse reads the count from the first line only; empty input is a no-op.
+func (sm *setMaxItems) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := sm.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		maxItems, err := strconv.ParseInt(matches[1], 10, 64)
+		if err != nil {
+			return err
+		}
+		sm.builder.SetMaxItems(maxItems)
+	}
+	return nil
+}
+
+// setMinItems parses a minItems annotation (integer count in submatch 1).
+type setMinItems struct {
+	builder validationBuilder
+	rx      *regexp.Regexp
+}
+
+func (sm *setMinItems) Matches(line string) bool {
+	return sm.rx.MatchString(line)
+}
+
+// Parse reads the count from the first line only; empty input is a no-op.
+func (sm *setMinItems) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := sm.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		minItems, err := strconv.ParseInt(matches[1], 10, 64)
+		if err != nil {
+			return err
+		}
+		sm.builder.SetMinItems(minItems)
+	}
+	return nil
+}
+
+// setMaxLength parses a maxLength annotation (integer length in submatch 1).
+type setMaxLength struct {
+	builder validationBuilder
+	rx      *regexp.Regexp
+}
+
+// Parse reads the length from the first line only; empty input is a no-op.
+func (sm *setMaxLength) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := sm.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		maxLength, err := strconv.ParseInt(matches[1], 10, 64)
+		if err != nil {
+			return err
+		}
+		sm.builder.SetMaxLength(maxLength)
+	}
+	return nil
+}
+
+func (sm *setMaxLength) Matches(line string) bool {
+	return sm.rx.MatchString(line)
+}
+
+// setMinLength parses a minLength annotation (integer length in submatch 1).
+type setMinLength struct {
+	builder validationBuilder
+	rx      *regexp.Regexp
+}
+
+// Parse reads the length from the first line only; empty input is a no-op.
+func (sm *setMinLength) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := sm.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		minLength, err := strconv.ParseInt(matches[1], 10, 64)
+		if err != nil {
+			return err
+		}
+		sm.builder.SetMinLength(minLength)
+	}
+	return nil
+}
+
+func (sm *setMinLength) Matches(line string) bool {
+	return sm.rx.MatchString(line)
+}
+
+// setPattern parses a pattern annotation (regex source in submatch 1).
+type setPattern struct {
+	builder validationBuilder
+	rx      *regexp.Regexp
+}
+
+// Parse reads the pattern from the first line only; empty input is a no-op.
+func (sm *setPattern) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := sm.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		sm.builder.SetPattern(matches[1])
+	}
+	return nil
+}
+
+func (sm *setPattern) Matches(line string) bool {
+	return sm.rx.MatchString(line)
+}
+
+// setCollectionFormat parses a collectionFormat annotation (value in submatch 1).
+type setCollectionFormat struct {
+	builder operationValidationBuilder
+	rx      *regexp.Regexp
+}
+
+// Parse reads the format from the first line only; empty input is a no-op.
+func (sm *setCollectionFormat) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := sm.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		sm.builder.SetCollectionFormat(matches[1])
+	}
+	return nil
+}
+
+func (sm *setCollectionFormat) Matches(line string) bool {
+	return sm.rx.MatchString(line)
+}
+
+// setUnique parses a uniqueItems annotation (boolean in submatch 1).
+type setUnique struct {
+	builder validationBuilder
+	rx      *regexp.Regexp
+}
+
+func (su *setUnique) Matches(line string) bool {
+	return su.rx.MatchString(line)
+}
+
+// Parse reads the flag from the first line only; empty input is a no-op.
+func (su *setUnique) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := su.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		req, err := strconv.ParseBool(matches[1])
+		if err != nil {
+			return err
+		}
+		su.builder.SetUnique(req)
+	}
+	return nil
+}
+
+// setEnum parses an enum annotation (raw value list in submatch 1).
+type setEnum struct {
+	builder validationBuilder
+	rx      *regexp.Regexp
+}
+
+func (se *setEnum) Matches(line string) bool {
+	return se.rx.MatchString(line)
+}
+
+// Parse forwards the raw enum text from the first line only; empty input is a no-op.
+func (se *setEnum) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := se.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		se.builder.SetEnum(matches[1])
+	}
+	return nil
+}
+
+// parseValueFromSchema coerces the string s to a Go value matching the
+// schema's type name (int, bool, float, JSON object/array). Unknown types,
+// a nil schema, or unparseable object/array JSON fall back to returning
+// s unchanged without error.
+func parseValueFromSchema(s string, schema *spec.SimpleSchema) (interface{}, error) {
+	if schema != nil {
+		switch strings.Trim(schema.TypeName(), "\"") {
+		case "integer", "int", "int64", "int32", "int16":
+			return strconv.Atoi(s)
+		case "bool", "boolean":
+			return strconv.ParseBool(s)
+		case "number", "float64", "float32":
+			return strconv.ParseFloat(s, 64)
+		case "object":
+			var obj map[string]interface{}
+			if err := json.Unmarshal([]byte(s), &obj); err != nil {
+				// If we can't parse it, just return the string.
+				return s, nil
+			}
+			return obj, nil
+		case "array":
+			var slice []interface{}
+			if err := json.Unmarshal([]byte(s), &slice); err != nil {
+				// If we can't parse it, just return the string.
+				return s, nil
+			}
+			return slice, nil
+		default:
+			return s, nil
+		}
+	} else {
+		return s, nil
+	}
+}
+
+// setDefault parses a default-value annotation, coercing the text to the
+// schema's type via parseValueFromSchema.
+type setDefault struct {
+	scheme  *spec.SimpleSchema
+	builder validationBuilder
+	rx      *regexp.Regexp
+}
+
+func (sd *setDefault) Matches(line string) bool {
+	return sd.rx.MatchString(line)
+}
+
+// Parse reads the value from the first line only; empty input is a no-op.
+func (sd *setDefault) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := sd.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		d, err := parseValueFromSchema(matches[1], sd.scheme)
+		if err != nil {
+			return err
+		}
+		sd.builder.SetDefault(d)
+	}
+	return nil
+}
+
+// setExample parses an example annotation, coercing the text to the
+// schema's type via parseValueFromSchema.
+type setExample struct {
+	scheme  *spec.SimpleSchema
+	builder validationBuilder
+	rx      *regexp.Regexp
+}
+
+func (se *setExample) Matches(line string) bool {
+	return se.rx.MatchString(line)
+}
+
+// Parse reads the value from the first line only; empty input is a no-op.
+func (se *setExample) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := se.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		d, err := parseValueFromSchema(matches[1], se.scheme)
+		if err != nil {
+			return err
+		}
+		se.builder.SetExample(d)
+	}
+	return nil
+}
+
+// matchOnlyParam recognizes an annotation line but deliberately does
+// nothing with it (the value is consumed elsewhere).
+type matchOnlyParam struct {
+	tgt *spec.Parameter
+	rx  *regexp.Regexp
+}
+
+func (mo *matchOnlyParam) Matches(line string) bool {
+	return mo.rx.MatchString(line)
+}
+
+// Parse is intentionally a no-op.
+func (mo *matchOnlyParam) Parse(_ []string) error {
+	return nil
+}
+
+// setRequiredParam parses a required annotation onto a parameter.
+type setRequiredParam struct {
+	tgt *spec.Parameter
+}
+
+func (su *setRequiredParam) Matches(line string) bool {
+	return rxRequired.MatchString(line)
+}
+
+// Parse reads the boolean from the first line only; empty input is a no-op.
+func (su *setRequiredParam) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := rxRequired.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		req, err := strconv.ParseBool(matches[1])
+		if err != nil {
+			return err
+		}
+		su.tgt.Required = req
+	}
+	return nil
+}
+
+// setReadOnlySchema parses a readOnly annotation onto a schema.
+type setReadOnlySchema struct {
+	tgt *spec.Schema
+}
+
+func (su *setReadOnlySchema) Matches(line string) bool {
+	return rxReadOnly.MatchString(line)
+}
+
+// Parse reads the boolean from the first line only; empty input is a no-op.
+func (su *setReadOnlySchema) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := rxReadOnly.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		req, err := strconv.ParseBool(matches[1])
+		if err != nil {
+			return err
+		}
+		su.tgt.ReadOnly = req
+	}
+	return nil
+}
+
+// setDeprecatedOp parses a deprecated annotation onto an operation.
+type setDeprecatedOp struct {
+	tgt *spec.Operation
+}
+
+func (su *setDeprecatedOp) Matches(line string) bool {
+	return rxDeprecated.MatchString(line)
+}
+
+// Parse reads the boolean from the first line only; empty input is a no-op.
+func (su *setDeprecatedOp) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := rxDeprecated.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		req, err := strconv.ParseBool(matches[1])
+		if err != nil {
+			return err
+		}
+		su.tgt.Deprecated = req
+	}
+	return nil
+}
+
+// setDiscriminator toggles the schema's discriminator to/from the given
+// field based on a boolean annotation.
+type setDiscriminator struct {
+	schema *spec.Schema
+	field  string
+}
+
+func (su *setDiscriminator) Matches(line string) bool {
+	return rxDiscriminator.MatchString(line)
+}
+
+// Parse reads the boolean from the first line only. True installs the field
+// as discriminator; false clears it only if this field currently holds it.
+func (su *setDiscriminator) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := rxDiscriminator.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		req, err := strconv.ParseBool(matches[1])
+		if err != nil {
+			return err
+		}
+		if req {
+			su.schema.Discriminator = su.field
+		} else if su.schema.Discriminator == su.field {
+			su.schema.Discriminator = ""
+		}
+	}
+	return nil
+}
+
+// setRequiredSchema adds or removes the field from the schema's Required
+// list based on a boolean annotation.
+type setRequiredSchema struct {
+	schema *spec.Schema
+	field  string
+}
+
+func (su *setRequiredSchema) Matches(line string) bool {
+	return rxRequired.MatchString(line)
+}
+
+// Parse reads the boolean from the first line only; empty input is a no-op.
+func (su *setRequiredSchema) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := rxRequired.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		req, err := strconv.ParseBool(matches[1])
+		if err != nil {
+			return err
+		}
+		// locate the field in the current Required list
+		midx := -1
+		for i, nm := range su.schema.Required {
+			if nm == su.field {
+				midx = i
+				break
+			}
+		}
+		if req {
+			if midx < 0 {
+				su.schema.Required = append(su.schema.Required, su.field)
+			}
+		} else if midx >= 0 {
+			su.schema.Required = append(su.schema.Required[:midx], su.schema.Required[midx+1:]...)
+		}
+	}
+	return nil
+}
+
+// newMultilineDropEmptyParser builds a parser that passes all non-empty
+// lines of a multi-line annotation to set.
+func newMultilineDropEmptyParser(rx *regexp.Regexp, set func([]string)) *multiLineDropEmptyParser {
+	return &multiLineDropEmptyParser{
+		rx:  rx,
+		set: set,
+	}
+}
+
+// multiLineDropEmptyParser forwards the annotation lines, minus blanks, to set.
+type multiLineDropEmptyParser struct {
+	set func([]string)
+	rx  *regexp.Regexp
+}
+
+func (m *multiLineDropEmptyParser) Matches(line string) bool {
+	return m.rx.MatchString(line)
+}
+
+// Parse strips empty lines and hands the remainder to the setter.
+func (m *multiLineDropEmptyParser) Parse(lines []string) error {
+	m.set(removeEmptyLines(lines))
+	return nil
+}
+
+// newSetSchemes builds a parser for the schemes annotation (rxSchemes).
+func newSetSchemes(set func([]string)) *setSchemes {
+	return &setSchemes{
+		set: set,
+		rx:  rxSchemes,
+	}
+}
+
+// setSchemes parses a comma-separated list of transport schemes.
+type setSchemes struct {
+	set func([]string)
+	rx  *regexp.Regexp
+}
+
+func (ss *setSchemes) Matches(line string) bool {
+	return ss.rx.MatchString(line)
+}
+
+// Parse splits submatch 1 of the first line on ", ", trims each entry and
+// forwards the non-empty ones; empty input is a no-op.
+func (ss *setSchemes) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+	matches := ss.rx.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		sch := strings.Split(matches[1], ", ")
+
+		schemes := []string{}
+		for _, s := range sch {
+			ts := strings.TrimSpace(s)
+			if ts != "" {
+				schemes = append(schemes, ts)
+			}
+		}
+		ss.set(schemes)
+	}
+	return nil
+}
+
+// newSetSecurity builds a parser for security requirement annotations.
+func newSetSecurity(rx *regexp.Regexp, setter func([]map[string][]string)) *setSecurity {
+	return &setSecurity{
+		set: setter,
+		rx:  rx,
+	}
+}
+
+// setSecurity parses "scheme: scope1, scope2" lines into swagger security
+// requirement maps.
+type setSecurity struct {
+	set func([]map[string][]string)
+	rx  *regexp.Regexp
+}
+
+func (ss *setSecurity) Matches(line string) bool {
+	return ss.rx.MatchString(line)
+}
+
+// Parse converts each "key: scopes" line into a single-entry map; lines
+// without a colon are skipped. Empty input is a no-op.
+func (ss *setSecurity) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+
+	var result []map[string][]string
+	for _, line := range lines {
+		kv := strings.SplitN(line, ":", 2)
+		scopes := []string{}
+		var key string
+
+		if len(kv) > 1 {
+			scs := strings.Split(kv[1], ",")
+			for _, scope := range scs {
+				tr := strings.TrimSpace(scope)
+				if tr != "" {
+					// keep only the first word of each scope entry
+					tr = strings.SplitAfter(tr, " ")[0]
+					scopes = append(scopes, strings.TrimSpace(tr))
+				}
+			}
+
+			key = strings.TrimSpace(kv[0])
+
+			result = append(result, map[string][]string{key: scopes})
+		}
+	}
+	ss.set(result)
+	return nil
+}
+
+// newSetResponses builds a parser for swagger:responses annotations,
+// resolving targets against the known definitions and responses.
+func newSetResponses(definitions map[string]spec.Schema, responses map[string]spec.Response, setter func(*spec.Response, map[int]spec.Response)) *setOpResponses {
+	return &setOpResponses{
+		set:         setter,
+		rx:          rxResponses,
+		definitions: definitions,
+		responses:   responses,
+	}
+}
+
+// setOpResponses parses response annotations into a default response and a
+// map of status-code responses, delivered through set.
+type setOpResponses struct {
+	set         func(*spec.Response, map[int]spec.Response)
+	rx          *regexp.Regexp
+	definitions map[string]spec.Schema
+	responses   map[string]spec.Response
+}
+
+func (ss *setOpResponses) Matches(line string) bool {
+	return ss.rx.MatchString(line)
+}
+
+// ResponseTag used when specifying a response to point to a defined swagger:response
+const ResponseTag = "response"
+
+// BodyTag used when specifying a response to point to a model/schema
+const BodyTag = "body"
+
+// DescriptionTag used when specifying a response that gives a description of the response
+const DescriptionTag = "description"
+
+// parseTags parses one response specification line of the form
+// "response:Model []body:Model description:free text...". It returns the
+// model/response name, the nesting depth of []-array prefixes, whether the
+// target is a definition (body:) rather than a named response, and the
+// trailing description. An unrecognized or misplaced tag yields an error.
+func parseTags(line string) (modelOrResponse string, arrays int, isDefinitionRef bool, description string, err error) {
+	tags := strings.Split(line, " ")
+	parsedModelOrResponse := false
+
+	for i, tagAndValue := range tags {
+		tagValList := strings.SplitN(tagAndValue, ":", 2)
+		var tag, value string
+		if len(tagValList) > 1 {
+			tag = tagValList[0]
+			value = tagValList[1]
+		} else {
+			// TODO: Print a warning, and in the long term, do not support not tagged values
+			// Add a default tag if none is supplied
+			if i == 0 {
+				tag = ResponseTag
+			} else {
+				tag = DescriptionTag
+			}
+			value = tagValList[0]
+		}
+
+		foundModelOrResponse := false
+		if !parsedModelOrResponse {
+			if tag == BodyTag {
+				foundModelOrResponse = true
+				isDefinitionRef = true
+			}
+			if tag == ResponseTag {
+				foundModelOrResponse = true
+				isDefinitionRef = false
+			}
+		}
+		if foundModelOrResponse {
+			// Read the model or response tag
+			parsedModelOrResponse = true
+			// Check for nested arrays
+			arrays = 0
+			for strings.HasPrefix(value, "[]") {
+				arrays++
+				value = value[2:]
+			}
+			// What's left over is the model name
+			modelOrResponse = value
+		} else {
+			foundDescription := false
+			if tag == DescriptionTag {
+				foundDescription = true
+			}
+			if foundDescription {
+				// Descriptions are special, they make they read the rest of the line
+				descriptionWords := []string{value}
+				if i < len(tags)-1 {
+					descriptionWords = append(descriptionWords, tags[i+1:]...)
+				}
+				description = strings.Join(descriptionWords, " ")
+				break
+			}
+			if tag == ResponseTag || tag == BodyTag || tag == DescriptionTag {
+				err = fmt.Errorf("valid tag %s, but not in a valid position", tag)
+			} else {
+				err = fmt.Errorf("invalid tag: %s", tag)
+			}
+			// return error
+			return
+		}
+	}
+
+	// TODO: Maybe do, if !parsedModelOrResponse {return some error}
+	return
+}
+
+// Parse converts "status: spec" lines into responses. "default" feeds the
+// default response; numeric keys feed the status-code map. A value-less key
+// produces an empty response. For body refs the schema points at
+// #/definitions, wrapped in nested arrays per the []-prefix count; for
+// response refs the response itself points at #/responses.
+func (ss *setOpResponses) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+
+	var def *spec.Response
+	var scr map[int]spec.Response
+
+	for _, line := range lines {
+		kv := strings.SplitN(line, ":", 2)
+		var key, value string
+
+		if len(kv) > 1 {
+			key = strings.TrimSpace(kv[0])
+			if key == "" {
+				// this must be some weird empty line
+				continue
+			}
+			value = strings.TrimSpace(kv[1])
+			if value == "" {
+				// bare status code: register an empty response
+				var resp spec.Response
+				if strings.EqualFold("default", key) {
+					if def == nil {
+						def = &resp
+					}
+				} else {
+					if sc, err := strconv.Atoi(key); err == nil {
+						if scr == nil {
+							scr = make(map[int]spec.Response)
+						}
+						scr[sc] = resp
+					}
+				}
+				continue
+			}
+			refTarget, arrays, isDefinitionRef, description, err := parseTags(value)
+			if err != nil {
+				return err
+			}
+			// A possible exception for having a definition
+			if _, ok := ss.responses[refTarget]; !ok {
+				if _, ok := ss.definitions[refTarget]; ok {
+					isDefinitionRef = true
+				}
+			}
+
+			var ref spec.Ref
+			if isDefinitionRef {
+				if description == "" {
+					description = refTarget
+				}
+				ref, err = spec.NewRef("#/definitions/" + refTarget)
+			} else {
+				ref, err = spec.NewRef("#/responses/" + refTarget)
+			}
+			if err != nil {
+				return err
+			}
+
+			// description should used on anyway.
+			resp := spec.Response{ResponseProps: spec.ResponseProps{Description: description}}
+
+			if isDefinitionRef {
+				resp.Schema = new(spec.Schema)
+				resp.Description = description
+				if arrays == 0 {
+					resp.Schema.Ref = ref
+				} else {
+					// wrap the ref in one array schema per []-prefix
+					cs := resp.Schema
+					for i := 0; i < arrays; i++ {
+						cs.Typed("array", "")
+						cs.Items = new(spec.SchemaOrArray)
+						cs.Items.Schema = new(spec.Schema)
+						cs = cs.Items.Schema
+					}
+					cs.Ref = ref
+				}
+				// ref. could be empty while use description tag
+			} else if len(refTarget) > 0 {
+				resp.Ref = ref
+			}
+
+			if strings.EqualFold("default", key) {
+				if def == nil {
+					def = &resp
+				}
+			} else {
+				if sc, err := strconv.Atoi(key); err == nil {
+					if scr == nil {
+						scr = make(map[int]spec.Response)
+					}
+					scr[sc] = resp
+				}
+			}
+		}
+	}
+	ss.set(def, scr)
+	return nil
+}
+
+// parseEnum splits a comma-separated enum declaration and converts each
+// entry according to the given simple schema. Entries that fail to convert
+// are kept verbatim as their original string form.
+func parseEnum(val string, s *spec.SimpleSchema) []interface{} {
+	parts := strings.Split(val, ",")
+	out := make([]interface{}, len(parts))
+	for i, raw := range parts {
+		if converted, err := parseValueFromSchema(raw, s); err == nil {
+			out[i] = converted
+		} else {
+			out[i] = raw
+		}
+	}
+	return out
+}
+
+// AlphaChars used when parsing for Vendor Extensions: the first character
+// from this set on a line determines its indentation level.
+const AlphaChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
+
+// newSetExtensions builds a section parser for vendor extensions that
+// forwards the parsed extensions to the given setter.
+func newSetExtensions(setter func(*spec.Extensions)) *setOpExtensions {
+	return &setOpExtensions{
+		set: setter,
+		rx:  rxExtensions,
+	}
+}
+
+// setOpExtensions parses an "extensions:" comment section.
+type setOpExtensions struct {
+	set func(*spec.Extensions) // receives the parsed extensions
+	rx  *regexp.Regexp         // matches the section header line
+}
+
+// extensionObject is one parsed x-* extension together with its root
+// container (map[string]string for a key:value pair, map[string]*[]string
+// for an array, or map[string]interface{} for an object).
+type extensionObject struct {
+	Extension string
+	Root      interface{}
+}
+
+// extensionParsingStack tracks the currently-open nested containers while
+// walking indented extension lines.
+type extensionParsingStack []interface{}
+
+// walkBack pops containers off the stack until the stack depth matches the
+// indentation level of the next line. It is called after finishing a leaf
+// entry whose successor is indented less deeply.
+func (stack *extensionParsingStack) walkBack(rawLines []string, lineIndex int) {
+	// indentation is measured as the column of the first letter on the line
+	indent := strings.IndexAny(rawLines[lineIndex], AlphaChars)
+	// NOTE(review): assumes lineIndex+1 is in range; both call sites guard
+	// with lineIndex < len(rawLines)-1 — confirm before reusing elsewhere.
+	nextIndent := strings.IndexAny(rawLines[lineIndex+1], AlphaChars)
+	if nextIndent < indent {
+		// Pop elements off the stack until we're back where we need to be
+		runbackIndex := 0
+		poppedIndent := 1000 // sentinel larger than any realistic indent column
+		for {
+			// scan backwards, popping one container per distinct shallower indent
+			checkIndent := strings.IndexAny(rawLines[lineIndex-runbackIndex], AlphaChars)
+			if nextIndent == checkIndent {
+				break
+			}
+			if checkIndent < poppedIndent {
+				*stack = (*stack)[:len(*stack)-1]
+				poppedIndent = checkIndent
+			}
+			runbackIndex++
+			// NOTE(review): if no earlier line has nextIndent's indentation
+			// this loop would index past the start of rawLines — presumably
+			// well-formed input always terminates it; verify against callers.
+		}
+	}
+}
+
+// buildExtensionObjects recursively parses through the given extension lines,
+// building and adding extension objects as it goes. Extensions may be
+// key:value pairs, arrays, or objects. rawLines keeps the original
+// indentation (used to track nesting); cleanLines carries the uncommented
+// text that is actually parsed.
+func buildExtensionObjects(rawLines []string, cleanLines []string, lineIndex int, extObjs *[]extensionObject, stack *extensionParsingStack) {
+	if lineIndex >= len(rawLines) {
+		// End of input: flush the extension currently under construction.
+		if stack != nil {
+			if ext, ok := (*stack)[0].(extensionObject); ok {
+				*extObjs = append(*extObjs, ext)
+			}
+		}
+		return
+	}
+	kv := strings.SplitN(cleanLines[lineIndex], ":", 2)
+	key := strings.TrimSpace(kv[0])
+	if key == "" {
+		// Some odd empty line
+		return
+	}
+
+	// A following line without a ":" separator is a plain list item.
+	nextIsList := false
+	if lineIndex < len(rawLines)-1 {
+		next := strings.SplitAfterN(cleanLines[lineIndex+1], ":", 2)
+		nextIsList = len(next) == 1
+	}
+
+	if len(kv) > 1 {
+		// Should be the start of a map or a key:value pair
+		value := strings.TrimSpace(kv[1])
+
+		if rxAllowedExtensions.MatchString(key) {
+			// New x-* extension started: flush the previous one, if any.
+			if stack != nil {
+				if ext, ok := (*stack)[0].(extensionObject); ok {
+					*extObjs = append(*extObjs, ext)
+				}
+			}
+
+			if value != "" {
+				ext := extensionObject{
+					Extension: key,
+				}
+				// Extension is simple key:value pair, no stack
+				ext.Root = make(map[string]string)
+				ext.Root.(map[string]string)[key] = value
+				*extObjs = append(*extObjs, ext)
+				buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, nil)
+			} else {
+				ext := extensionObject{
+					Extension: key,
+				}
+				if nextIsList {
+					// Extension is an array
+					ext.Root = make(map[string]*[]string)
+					rootList := make([]string, 0)
+					ext.Root.(map[string]*[]string)[key] = &rootList
+					stack = &extensionParsingStack{}
+					*stack = append(*stack, ext)
+					*stack = append(*stack, ext.Root.(map[string]*[]string)[key])
+				} else {
+					// Extension is an object
+					ext.Root = make(map[string]interface{})
+					rootMap := make(map[string]interface{})
+					ext.Root.(map[string]interface{})[key] = rootMap
+					stack = &extensionParsingStack{}
+					*stack = append(*stack, ext)
+					*stack = append(*stack, rootMap)
+				}
+				buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, stack)
+			}
+		} else if stack != nil && len(*stack) != 0 {
+			// A nested key inside the extension currently being built.
+			stackIndex := len(*stack) - 1
+			if value == "" {
+				if nextIsList {
+					// start of new list
+					newList := make([]string, 0)
+					(*stack)[stackIndex].(map[string]interface{})[key] = &newList
+					*stack = append(*stack, &newList)
+				} else {
+					// start of new map
+					newMap := make(map[string]interface{})
+					(*stack)[stackIndex].(map[string]interface{})[key] = newMap
+					*stack = append(*stack, newMap)
+				}
+			} else {
+				// key:value
+				if reflect.TypeOf((*stack)[stackIndex]).Kind() == reflect.Map {
+					(*stack)[stackIndex].(map[string]interface{})[key] = value
+				}
+				// Leaf entry: re-balance the stack against the next line's indent.
+				if lineIndex < len(rawLines)-1 && !rxAllowedExtensions.MatchString(cleanLines[lineIndex+1]) {
+					stack.walkBack(rawLines, lineIndex)
+				}
+			}
+			buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, stack)
+		}
+	} else if stack != nil && len(*stack) != 0 {
+		// Should be a list item
+		stackIndex := len(*stack) - 1
+		list := (*stack)[stackIndex].(*[]string)
+		*list = append(*list, key)
+		(*stack)[stackIndex] = list
+		if lineIndex < len(rawLines)-1 && !rxAllowedExtensions.MatchString(cleanLines[lineIndex+1]) {
+			stack.walkBack(rawLines, lineIndex)
+		}
+		buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, stack)
+	}
+}
+
+// Matches reports whether the line opens an "extensions:" section.
+func (ss *setOpExtensions) Matches(line string) bool {
+	return ss.rx.MatchString(line)
+}
+
+func (ss *setOpExtensions) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+
+ cleanLines := cleanupScannerLines(lines, rxUncommentHeaders, nil)
+
+ exts := new(spec.VendorExtensible)
+ extList := make([]extensionObject, 0)
+ buildExtensionObjects(lines, cleanLines, 0, &extList, nil)
+
+ // Extensions can be one of the following:
+ // key:value pair
+ // list/array
+ // object
+ for _, ext := range extList {
+ if _, ok := ext.Root.(map[string]string); ok {
+ exts.AddExtension(ext.Extension, ext.Root.(map[string]string)[ext.Extension])
+ } else if _, ok := ext.Root.(map[string]*[]string); ok {
+ exts.AddExtension(ext.Extension, *(ext.Root.(map[string]*[]string)[ext.Extension]))
+ } else if _, ok := ext.Root.(map[string]interface{}); ok {
+ exts.AddExtension(ext.Extension, ext.Root.(map[string]interface{})[ext.Extension])
+ } else {
+ debugLog("Unknown Extension type: %s", fmt.Sprint(reflect.TypeOf(ext.Root)))
+ }
+ }
+
+ ss.set(&exts.Extensions)
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers.go b/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers.go
new file mode 100644
index 000000000..6ffac76af
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers.go
@@ -0,0 +1,51 @@
+//go:build go1.19
+// +build go1.19
+
+package codescan
+
+import (
+ "strings"
+)
+
+// collectScannerTitleDescription splits cleaned-up comment headers into a
+// title and a description. A blank line separates the two; without one, the
+// first line is promoted to title only when it ends in punctuation or
+// carries a markdown-style heading prefix.
+func collectScannerTitleDescription(headers []string) (title, desc []string) {
+	hdrs := cleanupScannerLines(headers, rxUncommentHeaders, nil)
+
+	blank := -1
+	for i := range hdrs {
+		if strings.TrimSpace(hdrs[i]) == "" {
+			blank = i
+			break
+		}
+	}
+
+	if blank > -1 {
+		// Everything before the blank line is the title; strip any markdown
+		// heading marker from its first line.
+		title = hdrs[:blank]
+		if len(title) > 0 {
+			title[0] = rxTitleStart.ReplaceAllString(title[0], "")
+		}
+		if blank+1 < len(hdrs) {
+			desc = hdrs[blank+1:]
+		}
+		return title, desc
+	}
+
+	if len(hdrs) == 0 {
+		return nil, nil
+	}
+
+	first := hdrs[0]
+	switch {
+	case rxPunctuationEnd.MatchString(first):
+		title = []string{first}
+		desc = hdrs[1:]
+	case rxTitleStart.MatchString(first):
+		title = []string{rxTitleStart.ReplaceAllString(first, "")}
+		desc = hdrs[1:]
+	default:
+		desc = hdrs
+	}
+
+	return title, desc
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers_go118.go b/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers_go118.go
new file mode 100644
index 000000000..62eb59a96
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers_go118.go
@@ -0,0 +1,42 @@
+//go:build !go1.19
+// +build !go1.19
+
+package codescan
+
+import "strings"
+
+// collectScannerTitleDescription splits cleaned-up comment headers into a
+// title and a description (pre-go1.19 variant, without markdown heading
+// support). A blank line separates title from description; without one,
+// the first line becomes the title only when it ends in punctuation.
+func collectScannerTitleDescription(headers []string) (title, desc []string) {
+	hdrs := cleanupScannerLines(headers, rxUncommentHeaders, nil)
+
+	blank := -1
+	for i := range hdrs {
+		if strings.TrimSpace(hdrs[i]) == "" {
+			blank = i
+			break
+		}
+	}
+
+	if blank > -1 {
+		title = hdrs[:blank]
+		if blank+1 < len(hdrs) {
+			desc = hdrs[blank+1:]
+		}
+		return title, desc
+	}
+
+	if len(hdrs) == 0 {
+		return nil, nil
+	}
+
+	if first := hdrs[0]; rxPunctuationEnd.MatchString(first) {
+		title = []string{first}
+		desc = hdrs[1:]
+	} else {
+		desc = hdrs
+	}
+
+	return title, desc
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/regexprs.go b/vendor/github.com/go-swagger/go-swagger/codescan/regexprs.go
new file mode 100644
index 000000000..365b56f53
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/regexprs.go
@@ -0,0 +1,96 @@
+package codescan
+
+import "regexp"
+
+// Regular expressions used by the codescan annotation parser.
+//
+// Fixes in this revision (character-class / group typos that prevented
+// documented spellings from matching):
+//   - rxContact: "[Ii]info" required the literal typo "Iinfo", so
+//     "Contact info:" never matched; now "[Ii]nfo".
+//   - rxTOS: "(:?erms)" was a capturing group matching an optional colon,
+//     intended as the non-capturing "(?:erms)".
+//   - rxInfoExtensions: "[In]nfo" matched "Info"/"nnfo" but never "info";
+//     now "[Ii]nfo".
+const (
+	rxMethod = "(\\p{L}+)"
+	rxPath   = "((?:/[\\p{L}\\p{N}\\p{Pd}\\p{Pc}{}\\-\\.\\?_~%!$&'()*+,;=:@/]*)+/?)"
+	rxOpTags = "(\\p{L}[\\p{L}\\p{N}\\p{Pd}\\.\\p{Pc}\\p{Zs}]+)"
+	rxOpID   = "((?:\\p{L}[\\p{L}\\p{N}\\p{Pd}\\p{Pc}]+)+)"
+
+	rxMaximumFmt    = "%s[Mm]ax(?:imum)?\\p{Zs}*:\\p{Zs}*([\\<=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
+	rxMinimumFmt    = "%s[Mm]in(?:imum)?\\p{Zs}*:\\p{Zs}*([\\>=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
+	rxMultipleOfFmt = "%s[Mm]ultiple\\p{Zs}*[Oo]f\\p{Zs}*:\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
+
+	rxMaxLengthFmt        = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
+	rxMinLengthFmt        = "%s[Mm]in(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
+	rxPatternFmt          = "%s[Pp]attern\\p{Zs}*:\\p{Zs}*(.*)$"
+	rxCollectionFormatFmt = "%s[Cc]ollection(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ff]ormat)\\p{Zs}*:\\p{Zs}*(.*)$"
+	rxEnumFmt             = "%s[Ee]num\\p{Zs}*:\\p{Zs}*(.*)$"
+	rxDefaultFmt          = "%s[Dd]efault\\p{Zs}*:\\p{Zs}*(.*)$"
+	rxExampleFmt          = "%s[Ee]xample\\p{Zs}*:\\p{Zs}*(.*)$"
+
+	rxMaxItemsFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
+	rxMinItemsFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
+	rxUniqueFmt   = "%s[Uu]nique\\p{Zs}*:\\p{Zs}*(true|false)$"
+
+	rxItemsPrefixFmt = "(?:[Ii]tems[\\.\\p{Zs}]*){%d}"
+)
+
+var (
+	rxSwaggerAnnotation  = regexp.MustCompile(`swagger:([\p{L}\p{N}\p{Pd}\p{Pc}]+)`)
+	rxFileUpload         = regexp.MustCompile(`swagger:file`)
+	rxStrFmt             = regexp.MustCompile(`swagger:strfmt\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
+	rxAlias              = regexp.MustCompile(`swagger:alias`)
+	rxName               = regexp.MustCompile(`swagger:name\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)$`)
+	rxAllOf              = regexp.MustCompile(`swagger:allOf\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)?$`)
+	rxModelOverride      = regexp.MustCompile(`swagger:model\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
+	rxResponseOverride   = regexp.MustCompile(`swagger:response\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
+	rxParametersOverride = regexp.MustCompile(`swagger:parameters\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\p{Zs}]+)$`)
+	rxEnum               = regexp.MustCompile(`swagger:enum\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
+	rxIgnoreOverride     = regexp.MustCompile(`swagger:ignore\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
+	rxDefault            = regexp.MustCompile(`swagger:default\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
+	rxType               = regexp.MustCompile(`swagger:type\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
+	rxRoute              = regexp.MustCompile(
+		"swagger:route\\p{Zs}*" +
+			rxMethod +
+			"\\p{Zs}*" +
+			rxPath +
+			"(?:\\p{Zs}+" +
+			rxOpTags +
+			")?\\p{Zs}+" +
+			rxOpID + "\\p{Zs}*$")
+	rxBeginYAMLSpec    = regexp.MustCompile(`---\p{Zs}*$`)
+	rxUncommentHeaders = regexp.MustCompile(`^[\p{Zs}\t/\*-]*\|?`)
+	rxUncommentYAML    = regexp.MustCompile(`^[\p{Zs}\t]*/*`)
+	rxOperation        = regexp.MustCompile(
+		"swagger:operation\\p{Zs}*" +
+			rxMethod +
+			"\\p{Zs}*" +
+			rxPath +
+			"(?:\\p{Zs}+" +
+			rxOpTags +
+			")?\\p{Zs}+" +
+			rxOpID + "\\p{Zs}*$")
+
+	rxSpace              = regexp.MustCompile(`\p{Zs}+`)
+	rxIndent             = regexp.MustCompile(`[\p{Zs}\t]*/*[\p{Zs}\t]*[^\p{Zs}\t]`)
+	rxNotIndent          = regexp.MustCompile(`[^\p{Zs}\t]`)
+	rxPunctuationEnd     = regexp.MustCompile(`\p{Po}$`)
+	rxTitleStart         = regexp.MustCompile(`^[#]+\p{Zs}+`)
+	rxStripComments      = regexp.MustCompile(`^[^\p{L}\p{N}\p{Pd}\p{Pc}\+]*`)
+	rxStripTitleComments = regexp.MustCompile(`^[^\p{L}]*[Pp]ackage\p{Zs}+[^\p{Zs}]+\p{Zs}*`)
+	rxAllowedExtensions  = regexp.MustCompile(`^[Xx]-`)
+
+	rxIn              = regexp.MustCompile(`[Ii]n\p{Zs}*:\p{Zs}*(query|path|header|body|formData)$`)
+	rxRequired        = regexp.MustCompile(`[Rr]equired\p{Zs}*:\p{Zs}*(true|false)$`)
+	rxDiscriminator   = regexp.MustCompile(`[Dd]iscriminator\p{Zs}*:\p{Zs}*(true|false)$`)
+	rxReadOnly        = regexp.MustCompile(`[Rr]ead(?:\p{Zs}*|[\p{Pd}\p{Pc}])?[Oo]nly\p{Zs}*:\p{Zs}*(true|false)$`)
+	rxConsumes        = regexp.MustCompile(`[Cc]onsumes\p{Zs}*:`)
+	rxProduces        = regexp.MustCompile(`[Pp]roduces\p{Zs}*:`)
+	rxSecuritySchemes = regexp.MustCompile(`[Ss]ecurity\p{Zs}*:`)
+	rxSecurity        = regexp.MustCompile(`[Ss]ecurity\p{Zs}*[Dd]efinitions:`)
+	rxResponses       = regexp.MustCompile(`[Rr]esponses\p{Zs}*:`)
+	rxParameters      = regexp.MustCompile(`[Pp]arameters\p{Zs}*:`)
+	rxSchemes         = regexp.MustCompile(`[Ss]chemes\p{Zs}*:\p{Zs}*((?:(?:https?|HTTPS?|wss?|WSS?)[\p{Zs},]*)+)$`)
+	rxVersion         = regexp.MustCompile(`[Vv]ersion\p{Zs}*:\p{Zs}*(.+)$`)
+	rxHost            = regexp.MustCompile(`[Hh]ost\p{Zs}*:\p{Zs}*(.+)$`)
+	rxBasePath        = regexp.MustCompile(`[Bb]ase\p{Zs}*-*[Pp]ath\p{Zs}*:\p{Zs}*` + rxPath + "$")
+	rxLicense         = regexp.MustCompile(`[Ll]icense\p{Zs}*:\p{Zs}*(.+)$`)
+	rxContact         = regexp.MustCompile(`[Cc]ontact\p{Zs}*-?(?:[Ii]nfo\p{Zs}*)?:\p{Zs}*(.+)$`)
+	rxTOS             = regexp.MustCompile(`[Tt](?:erms)?\p{Zs}*-?[Oo]f?\p{Zs}*-?[Ss](?:ervice)?\p{Zs}*:`)
+	rxExtensions      = regexp.MustCompile(`[Ee]xtensions\p{Zs}*:`)
+	rxInfoExtensions  = regexp.MustCompile(`[Ii]nfo\p{Zs}*[Ee]xtensions:`)
+	rxDeprecated      = regexp.MustCompile(`[Dd]eprecated\p{Zs}*:\p{Zs}*(true|false)$`)
+	// currently unused: rxExample = regexp.MustCompile(`[Ex]ample\p{Zs}*:\p{Zs}*(.*)$`)
+)
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/responses.go b/vendor/github.com/go-swagger/go-swagger/codescan/responses.go
new file mode 100644
index 000000000..350cd3a7b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/responses.go
@@ -0,0 +1,454 @@
+package codescan
+
+import (
+ "fmt"
+ "go/ast"
+ "go/types"
+ "strings"
+
+ "github.com/pkg/errors"
+
+ "golang.org/x/tools/go/ast/astutil"
+
+ "github.com/go-openapi/spec"
+)
+
+// responseTypable is a swaggerTypable implementation that routes type
+// information either to a response header or to the response body schema,
+// depending on the parameter location ("in").
+type responseTypable struct {
+	in       string
+	header   *spec.Header
+	response *spec.Response
+}
+
+// Level reports the schema nesting level; responses always sit at the root.
+func (ht responseTypable) Level() int { return 0 }
+
+// Typed applies the given type and format to the response header.
+func (ht responseTypable) Typed(tpe, format string) {
+	ht.header.Typed(tpe, format)
+}
+
+// bodyTypable returns a typable wrapping the item schema of an array body
+// schema when the location is "body". For any other location it returns
+// nil, nil so the caller falls back to header handling.
+func bodyTypable(in string, schema *spec.Schema) (swaggerTypable, *spec.Schema) {
+	if in != "body" {
+		return nil, nil
+	}
+	// Body parameters are modelled as an array: make sure the schema and
+	// its item schema exist before handing the item schema out.
+	if schema == nil {
+		schema = new(spec.Schema)
+	}
+	if schema.Items == nil {
+		schema.Items = new(spec.SchemaOrArray)
+	}
+	if schema.Items.Schema == nil {
+		schema.Items.Schema = new(spec.Schema)
+	}
+	schema.Typed("array", "")
+	return schemaTypable{schema.Items.Schema, 1}, schema
+}
+
+// Items returns the typable for element types: for body responses it is the
+// body schema's item schema; otherwise the header is turned into an array
+// and its Items member is used.
+func (ht responseTypable) Items() swaggerTypable {
+	bdt, schema := bodyTypable(ht.in, ht.response.Schema)
+	if bdt != nil {
+		ht.response.Schema = schema
+		return bdt
+	}
+
+	if ht.header.Items == nil {
+		ht.header.Items = new(spec.Items)
+	}
+	ht.header.Type = "array"
+	return itemsTypable{ht.header.Items, 1}
+}
+
+// SetRef records a $ref on the response body schema.
+func (ht responseTypable) SetRef(ref spec.Ref) {
+	// having trouble seeing the usefulness of this one here
+	ht.Schema().Ref = ref
+}
+
+// Schema returns the response body schema, allocating it on first use.
+func (ht responseTypable) Schema() *spec.Schema {
+	if ht.response.Schema == nil {
+		ht.response.Schema = new(spec.Schema)
+	}
+	return ht.response.Schema
+}
+
+// SetSchema replaces the response body schema.
+func (ht responseTypable) SetSchema(schema *spec.Schema) {
+	ht.response.Schema = schema
+}
+
+// CollectionOf marks the header as a collection of the given items.
+func (ht responseTypable) CollectionOf(items *spec.Items, format string) {
+	ht.header.CollectionOf(items, format)
+}
+
+// AddExtension attaches a vendor extension to the response.
+func (ht responseTypable) AddExtension(key string, value interface{}) {
+	ht.response.AddExtension(key, value)
+}
+
+// WithEnum sets the allowed enum values on the response header.
+//
+// NOTE(review): the original forwarded the variadic slice as a single
+// argument (ht.header.WithEnum(values)), which boxed the whole []interface{}
+// as one enum entry and nested the values one level deep; spreading with
+// "..." forwards them unchanged.
+func (ht responseTypable) WithEnum(values ...interface{}) {
+	ht.header.WithEnum(values...)
+}
+
+// WithEnumDescription is a no-op: response headers carry no enum descriptions.
+func (ht responseTypable) WithEnumDescription(_ string) {
+	// no
+}
+
+// headerValidations applies validation tags parsed from doc comments onto a
+// response header definition.
+type headerValidations struct {
+	current *spec.Header
+}
+
+// SetMaximum records the maximum and whether the bound is exclusive.
+func (sv headerValidations) SetMaximum(val float64, exclusive bool) {
+	sv.current.Maximum = &val
+	sv.current.ExclusiveMaximum = exclusive
+}
+
+// SetMinimum records the minimum and whether the bound is exclusive.
+func (sv headerValidations) SetMinimum(val float64, exclusive bool) {
+	sv.current.Minimum = &val
+	sv.current.ExclusiveMinimum = exclusive
+}
+func (sv headerValidations) SetMultipleOf(val float64)      { sv.current.MultipleOf = &val }
+func (sv headerValidations) SetMinItems(val int64)          { sv.current.MinItems = &val }
+func (sv headerValidations) SetMaxItems(val int64)          { sv.current.MaxItems = &val }
+func (sv headerValidations) SetMinLength(val int64)         { sv.current.MinLength = &val }
+func (sv headerValidations) SetMaxLength(val int64)         { sv.current.MaxLength = &val }
+func (sv headerValidations) SetPattern(val string)          { sv.current.Pattern = val }
+func (sv headerValidations) SetUnique(val bool)             { sv.current.UniqueItems = val }
+func (sv headerValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
+
+// SetEnum parses a comma-separated enum list, converting each entry using
+// the header's own type and format.
+func (sv headerValidations) SetEnum(val string) {
+	sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
+}
+func (sv headerValidations) SetDefault(val interface{}) { sv.current.Default = val }
+func (sv headerValidations) SetExample(val interface{}) { sv.current.Example = val }
+
+// responseBuilder turns a swagger:response annotated declaration into a
+// spec.Response, collecting referenced declarations for later processing.
+type responseBuilder struct {
+	ctx       *scanCtx      // shared scan state and declaration lookup
+	decl      *entityDecl   // the annotated response declaration
+	postDecls []*entityDecl // declarations discovered while building
+}
+
+// Build fills in (or creates) the named entry in responses from the
+// declaration's doc comments and struct body.
+func (r *responseBuilder) Build(responses map[string]spec.Response) error {
+	// check if there is a swagger:response tag that is followed by one or more words,
+	// these words are the ids of the operations this parameter struct applies to
+	// once type name is found convert it to a schema, by looking up the schema in the
+	// parameters dictionary that got passed into this parse method
+
+	name, _ := r.decl.ResponseNames()
+	response := responses[name]
+	debugLog("building response: %s", name)
+
+	// analyze doc comment for the model
+	sp := new(sectionedParser)
+	sp.setDescription = func(lines []string) { response.Description = joinDropLast(lines) }
+	if err := sp.Parse(r.decl.Comments); err != nil {
+		return err
+	}
+
+	// analyze struct body for fields etc
+	// each exported struct field:
+	// * gets a type mapped to a go primitive
+	// * perhaps gets a format
+	// * has to document the validations that apply for the type and the field
+	// * when the struct field points to a model it becomes a ref: #/definitions/ModelName
+	// * comments that aren't tags is used as the description
+	if err := r.buildFromType(r.decl.Type, &response, make(map[string]bool)); err != nil {
+		return err
+	}
+	responses[name] = response
+	return nil
+}
+
+// buildFromField maps one struct field's Go type onto the given typable:
+// basics map to swagger primitives, pointers/arrays/slices recurse into
+// their element type, and structs, interfaces, maps and named types are
+// delegated to the schema builder (whose discovered declarations are
+// appended to r.postDecls).
+//
+// The previously duplicated *types.Struct and *types.Interface cases are
+// merged into one multi-type case; their bodies were byte-identical.
+func (r *responseBuilder) buildFromField(fld *types.Var, tpe types.Type, typable swaggerTypable, seen map[string]bool) error {
+	debugLog("build from field %s: %T", fld.Name(), tpe)
+	switch ftpe := tpe.(type) {
+	case *types.Basic:
+		return swaggerSchemaForType(ftpe.Name(), typable)
+	case *types.Struct, *types.Interface:
+		// both are handed to the schema builder unchanged
+		sb := schemaBuilder{
+			decl: r.decl,
+			ctx:  r.ctx,
+		}
+		if err := sb.buildFromType(tpe, typable); err != nil {
+			return err
+		}
+		r.postDecls = append(r.postDecls, sb.postDecls...)
+		return nil
+	case *types.Pointer:
+		return r.buildFromField(fld, ftpe.Elem(), typable, seen)
+	case *types.Array:
+		return r.buildFromField(fld, ftpe.Elem(), typable.Items(), seen)
+	case *types.Slice:
+		return r.buildFromField(fld, ftpe.Elem(), typable.Items(), seen)
+	case *types.Map:
+		// maps become object schemas with additionalProperties
+		schema := new(spec.Schema)
+		typable.Schema().Typed("object", "").AdditionalProperties = &spec.SchemaOrBool{
+			Schema: schema,
+		}
+		sb := schemaBuilder{
+			decl: r.decl,
+			ctx:  r.ctx,
+		}
+		if err := sb.buildFromType(ftpe.Elem(), schemaTypable{schema, typable.Level() + 1}); err != nil {
+			return err
+		}
+		r.postDecls = append(r.postDecls, sb.postDecls...)
+		return nil
+	case *types.Named:
+		if decl, found := r.ctx.DeclForType(ftpe.Obj().Type()); found {
+			// time.Time and strfmt-annotated types map to string formats
+			if decl.Type.Obj().Pkg().Path() == "time" && decl.Type.Obj().Name() == "Time" {
+				typable.Typed("string", "date-time")
+				return nil
+			}
+			if sfnm, isf := strfmtName(decl.Comments); isf {
+				typable.Typed("string", sfnm)
+				return nil
+			}
+			sb := &schemaBuilder{ctx: r.ctx, decl: decl}
+			sb.inferNames()
+			if err := sb.buildFromType(decl.Type, typable); err != nil {
+				return err
+			}
+			r.postDecls = append(r.postDecls, sb.postDecls...)
+			return nil
+		}
+		return errors.Errorf("unable to find package and source file for: %s", ftpe.String())
+	default:
+		return errors.Errorf("unknown type for %s: %T", fld.String(), fld.Type())
+	}
+}
+
+// buildFromType resolves the annotated declaration's type down to a struct
+// and populates resp from it. Pointers are dereferenced; named structs are
+// walked field by field; other named types become the response body schema.
+// Anonymous (unnamed) types are rejected.
+func (r *responseBuilder) buildFromType(otpe types.Type, resp *spec.Response, seen map[string]bool) error {
+	switch tpe := otpe.(type) {
+	case *types.Pointer:
+		return r.buildFromType(tpe.Elem(), resp, seen)
+	case *types.Named:
+		o := tpe.Obj()
+		switch stpe := o.Type().Underlying().(type) {
+		case *types.Struct:
+			debugLog("build from type %s: %T", tpe.Obj().Name(), otpe)
+			// prefer the declaration found in the scan context, if any
+			if decl, found := r.ctx.DeclForType(o.Type()); found {
+				return r.buildFromStruct(decl, stpe, resp, seen)
+			}
+			return r.buildFromStruct(r.decl, stpe, resp, seen)
+		default:
+			// non-struct named type: build a schema for it and use that as
+			// the response body
+			if decl, found := r.ctx.DeclForType(o.Type()); found {
+				var schema spec.Schema
+				typable := schemaTypable{schema: &schema, level: 0}
+
+				if decl.Type.Obj().Pkg().Path() == "time" && decl.Type.Obj().Name() == "Time" {
+					typable.Typed("string", "date-time")
+					return nil
+				}
+				if sfnm, isf := strfmtName(decl.Comments); isf {
+					typable.Typed("string", sfnm)
+					return nil
+				}
+				sb := &schemaBuilder{ctx: r.ctx, decl: decl}
+				sb.inferNames()
+				if err := sb.buildFromType(tpe.Underlying(), typable); err != nil {
+					return err
+				}
+				resp.WithSchema(&schema)
+				r.postDecls = append(r.postDecls, sb.postDecls...)
+				return nil
+			}
+			return errors.Errorf("responses can only be structs, did you mean for %s to be the response body?", otpe.String())
+		}
+	default:
+		return errors.New("anonymous types are currently not supported for responses")
+	}
+}
+
+// buildFromStruct walks the response struct's fields: embedded fields are
+// flattened recursively, body fields become the response schema, and the
+// remaining fields become response headers (with validations parsed from
+// their doc comments). Headers not seen during the walk are pruned.
+func (r *responseBuilder) buildFromStruct(decl *entityDecl, tpe *types.Struct, resp *spec.Response, seen map[string]bool) error {
+	if tpe.NumFields() == 0 {
+		return nil
+	}
+
+	for i := 0; i < tpe.NumFields(); i++ {
+		fld := tpe.Field(i)
+		if fld.Embedded() {
+			// flatten embedded structs into this response
+			if err := r.buildFromType(fld.Type(), resp, seen); err != nil {
+				return err
+			}
+			continue
+		}
+		if fld.Anonymous() {
+			debugLog("skipping anonymous field")
+			continue
+		}
+
+		tg := tpe.Tag(i)
+
+		// locate the ast.Field so we can read its doc comments
+		var afld *ast.Field
+		ans, _ := astutil.PathEnclosingInterval(decl.File, fld.Pos(), fld.Pos())
+		for _, an := range ans {
+			at, valid := an.(*ast.Field)
+			if !valid {
+				continue
+			}
+
+			debugLog("field %s: %s(%T) [%q] ==> %s", fld.Name(), fld.Type().String(), fld.Type(), tg, at.Doc.Text())
+			afld = at
+			break
+		}
+
+		if afld == nil {
+			debugLog("can't find source associated with %s for %s", fld.String(), tpe.String())
+			continue
+		}
+
+		// if the field is annotated with swagger:ignore, ignore it
+		if ignored(afld.Doc) {
+			continue
+		}
+
+		name, ignore, _, err := parseJSONTag(afld)
+		if err != nil {
+			return err
+		}
+		if ignore {
+			continue
+		}
+
+		var in string
+		// scan for param location first, this changes some behavior down the line
+		if afld.Doc != nil {
+			for _, cmt := range afld.Doc.List {
+				for _, line := range strings.Split(cmt.Text, "\n") {
+					matches := rxIn.FindStringSubmatch(line)
+					if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
+						in = strings.TrimSpace(matches[1])
+					}
+				}
+			}
+		}
+
+		// ps is a copy of the existing header (zero value if absent); it is
+		// written back into resp.Headers at the end of the iteration.
+		ps := resp.Headers[name]
+
+		// support swagger:file for response
+		// An API operation can return a file, such as an image or PDF. In this case,
+		// define the response schema with type: file and specify the appropriate MIME types in the produces section.
+		if afld.Doc != nil && fileParam(afld.Doc) {
+			resp.Schema = &spec.Schema{}
+			resp.Schema.Typed("file", "")
+		} else if err := r.buildFromField(fld, fld.Type(), responseTypable{in, &ps, resp}, seen); err != nil {
+			return err
+		}
+
+		if strfmtName, ok := strfmtName(afld.Doc); ok {
+			ps.Typed("string", strfmtName)
+		}
+
+		// parse validation tags from the field's doc comments into ps
+		sp := new(sectionedParser)
+		sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
+		sp.taggers = []tagParser{
+			newSingleLineTagParser("maximum", &setMaximum{headerValidations{&ps}, rxf(rxMaximumFmt, "")}),
+			newSingleLineTagParser("minimum", &setMinimum{headerValidations{&ps}, rxf(rxMinimumFmt, "")}),
+			newSingleLineTagParser("multipleOf", &setMultipleOf{headerValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
+			newSingleLineTagParser("minLength", &setMinLength{headerValidations{&ps}, rxf(rxMinLengthFmt, "")}),
+			newSingleLineTagParser("maxLength", &setMaxLength{headerValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
+			newSingleLineTagParser("pattern", &setPattern{headerValidations{&ps}, rxf(rxPatternFmt, "")}),
+			newSingleLineTagParser("collectionFormat", &setCollectionFormat{headerValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
+			newSingleLineTagParser("minItems", &setMinItems{headerValidations{&ps}, rxf(rxMinItemsFmt, "")}),
+			newSingleLineTagParser("maxItems", &setMaxItems{headerValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
+			newSingleLineTagParser("unique", &setUnique{headerValidations{&ps}, rxf(rxUniqueFmt, "")}),
+			newSingleLineTagParser("enum", &setEnum{headerValidations{&ps}, rxf(rxEnumFmt, "")}),
+			newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxDefaultFmt, "")}),
+			newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxExampleFmt, "")}),
+		}
+		// itemsTaggers produces the per-nesting-level taggers for array items
+		itemsTaggers := func(items *spec.Items, level int) []tagParser {
+			// the expression is 1-index based not 0-index
+			itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
+
+			return []tagParser{
+				newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
+			}
+		}
+
+		// parseArrayTypes walks nested array/pointer/selector types,
+		// registering item taggers for each array nesting level
+		var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
+		parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
+			if items == nil {
+				return []tagParser{}, nil
+			}
+			switch iftpe := expr.(type) {
+			case *ast.ArrayType:
+				eleTaggers := itemsTaggers(items, level)
+				sp.taggers = append(eleTaggers, sp.taggers...)
+				otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
+				if err != nil {
+					return nil, err
+				}
+				return otherTaggers, nil
+			case *ast.Ident:
+				taggers := []tagParser{}
+				if iftpe.Obj == nil {
+					taggers = itemsTaggers(items, level)
+				}
+				otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
+				if err != nil {
+					return nil, err
+				}
+				return append(taggers, otherTaggers...), nil
+			case *ast.SelectorExpr:
+				otherTaggers, err := parseArrayTypes(iftpe.Sel, items.Items, level+1)
+				if err != nil {
+					return nil, err
+				}
+				return otherTaggers, nil
+			case *ast.StarExpr:
+				otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
+				if err != nil {
+					return nil, err
+				}
+				return otherTaggers, nil
+			default:
+				return nil, fmt.Errorf("unknown field type ele for %q", name)
+			}
+		}
+		// check if this is a primitive, if so parse the validations from the
+		// doc comments of the slice declaration.
+		if ftped, ok := afld.Type.(*ast.ArrayType); ok {
+			taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
+			if err != nil {
+				return err
+			}
+			sp.taggers = append(taggers, sp.taggers...)
+		}
+
+		if err := sp.Parse(afld.Doc); err != nil {
+			return err
+		}
+
+		// body fields never become headers; everything else does
+		if in != "body" {
+			seen[name] = true
+			if resp.Headers == nil {
+				resp.Headers = make(map[string]spec.Header)
+			}
+			resp.Headers[name] = ps
+		}
+	}
+
+	// drop headers that were not (re)visited during this walk
+	for k := range resp.Headers {
+		if !seen[k] {
+			delete(resp.Headers, k)
+		}
+	}
+	return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/route_params.go b/vendor/github.com/go-swagger/go-swagger/codescan/route_params.go
new file mode 100644
index 000000000..7ca5b0237
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/route_params.go
@@ -0,0 +1,263 @@
+package codescan
+
+import (
+ "errors"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
const (
	// ParamDescriptionKey indicates the tag used to define a parameter description in swagger:route
	ParamDescriptionKey = "description"
	// ParamNameKey indicates the tag used to define a parameter name in swagger:route
	ParamNameKey = "name"
	// ParamInKey indicates the tag used to define a parameter location in swagger:route
	ParamInKey = "in"
	// ParamRequiredKey indicates the tag used to declare whether a parameter is required in swagger:route
	ParamRequiredKey = "required"
	// ParamTypeKey indicates the tag used to define the parameter type in swagger:route
	ParamTypeKey = "type"
	// ParamAllowEmptyKey indicates the tag used to indicate whether a parameter allows empty values in swagger:route
	ParamAllowEmptyKey = "allowempty"

	// SchemaMinKey indicates the tag used to indicate the minimum value allowed for this type in swagger:route
	SchemaMinKey = "min"
	// SchemaMaxKey indicates the tag used to indicate the maximum value allowed for this type in swagger:route
	SchemaMaxKey = "max"
	// SchemaEnumKey indicates the tag used to specify the allowed values for this type in swagger:route
	SchemaEnumKey = "enum"
	// SchemaFormatKey indicates the expected format for this field in swagger:route
	SchemaFormatKey = "format"
	// SchemaDefaultKey indicates the default value for this field in swagger:route
	SchemaDefaultKey = "default"
	// SchemaMinLenKey indicates the minimum length of this field in swagger:route
	SchemaMinLenKey = "minlength"
	// SchemaMaxLenKey indicates the maximum length of this field in swagger:route
	SchemaMaxLenKey = "maxlength"

	// TypeArray is the identifier for an array type in swagger:route
	TypeArray = "array"
	// TypeNumber is the identifier for a number type in swagger:route
	TypeNumber = "number"
	// TypeInteger is the identifier for an integer type in swagger:route
	TypeInteger = "integer"
	// TypeBoolean is the identifier for a boolean type in swagger:route
	TypeBoolean = "boolean"
	// TypeBool is the identifier for a boolean type in swagger:route
	TypeBool = "bool"
	// TypeObject is the identifier for an object type in swagger:route
	TypeObject = "object"
	// TypeString is the identifier for a string type in swagger:route
	TypeString = "string"
)
+
var (
	// validIn lists the parameter locations accepted by the "in" tag.
	validIn = []string{"path", "query", "header", "body", "form"}
	// basicTypes lists the primitive swagger type names accepted by the "type" tag.
	basicTypes = []string{TypeInteger, TypeNumber, TypeString, TypeBoolean, TypeBool, TypeArray}
)
+
+func newSetParams(params []*spec.Parameter, setter func([]*spec.Parameter)) *setOpParams {
+ return &setOpParams{
+ set: setter,
+ parameters: params,
+ }
+}
+
// setOpParams is a section tag parser that accumulates swagger:route
// parameter definitions and publishes the final list through a setter callback.
type setOpParams struct {
	set        func([]*spec.Parameter) // receives the complete parameter list after Parse
	parameters []*spec.Parameter       // parameters collected so far
}
+
// Matches reports whether line opens a swagger parameters section.
func (s *setOpParams) Matches(line string) bool {
	return rxParameters.MatchString(line)
}
+
// Parse consumes the lines of a swagger parameters section. Each parameter
// starts on a line prefixed with "+"; subsequent "key: value" lines fill in
// its attributes. Keys not recognized here (min, max, enum, ...) are kept
// aside and applied to the parameter's schema when it is finalized.
func (s *setOpParams) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}

	var current *spec.Parameter
	var extraData map[string]string

	for _, line := range lines {
		l := strings.TrimSpace(line)

		// a "+" bullet starts a new parameter; flush the previous one first
		if strings.HasPrefix(l, "+") {
			s.finalizeParam(current, extraData)
			current = new(spec.Parameter)
			extraData = make(map[string]string)
			l = strings.TrimPrefix(l, "+")
		}

		kv := strings.SplitN(l, ":", 2)

		// skip lines that are not key:value pairs
		if len(kv) <= 1 {
			continue
		}

		key := strings.ToLower(strings.TrimSpace(kv[0]))
		value := strings.TrimSpace(kv[1])

		// a key:value line before any "+" bullet is malformed input
		if current == nil {
			return errors.New("invalid route/operation schema provided")
		}

		switch key {
		case ParamDescriptionKey:
			current.Description = value
		case ParamNameKey:
			current.Name = value
		case ParamInKey:
			// unsupported locations are silently ignored
			v := strings.ToLower(value)
			if contains(validIn, v) {
				current.In = v
			}
		case ParamRequiredKey:
			if v, err := strconv.ParseBool(value); err == nil {
				current.Required = v
			}
		case ParamTypeKey:
			if current.Schema == nil {
				current.Schema = new(spec.Schema)
			}
			if contains(basicTypes, value) {
				current.Type = strings.ToLower(value)
				// normalize "bool" to the swagger spelling "boolean"
				if current.Type == TypeBool {
					current.Type = TypeBoolean
				}
			} else if ref, err := spec.NewRef("#/definitions/" + value); err == nil {
				// non-primitive types become refs into #/definitions
				current.Type = TypeObject
				current.Schema.Ref = ref
			}
			current.Schema.Type = spec.StringOrArray{current.Type}
		case ParamAllowEmptyKey:
			if v, err := strconv.ParseBool(value); err == nil {
				current.AllowEmptyValue = v
			}
		default:
			// schema-level attributes, applied later by finalizeParam
			extraData[key] = value
		}
	}

	// flush the last parameter and publish the full list
	s.finalizeParam(current, extraData)
	s.set(s.parameters)
	return nil
}
+
// finalizeParam applies the accumulated schema attributes to param and
// appends it to the collected parameter list. A nil param (no parameter
// started yet) is a no-op.
func (s *setOpParams) finalizeParam(param *spec.Parameter, data map[string]string) {
	if param == nil {
		return
	}

	processSchema(data, param)

	// schema is only allowed for parameters in "body"
	// see https://swagger.io/specification/v2/#parameterObject
	switch {
	case param.In == "body":
		param.Type = ""

	case param.Schema != nil:
		// convert schema into validations
		param.SetValidations(param.Schema.Validations())
		param.Default = param.Schema.Default
		param.Format = param.Schema.Format
		param.Schema = nil
	}

	s.parameters = append(s.parameters, param)
}
+
// processSchema applies the extra "key: value" attributes collected during
// parsing (min, max, enum, format, default, ...) to the parameter's schema.
// It is a no-op when the parameter declared no type (and so has no schema).
func processSchema(data map[string]string, param *spec.Parameter) {
	if param.Schema == nil {
		return
	}

	var enumValues []string

	for key, value := range data {
		switch key {
		case SchemaMinKey:
			// numeric bounds only apply to number/integer types;
			// a parse error leaves a zero-valued bound in place
			if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
				v, _ := strconv.ParseFloat(value, 64)
				param.Schema.Minimum = &v
			}
		case SchemaMaxKey:
			if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
				v, _ := strconv.ParseFloat(value, 64)
				param.Schema.Maximum = &v
			}
		case SchemaMinLenKey:
			// NOTE(review): for array types this sets MinLength/MaxLength
			// (string validations) rather than MinItems/MaxItems — confirm
			// this is the intended mapping.
			if getType(param.Schema) == TypeArray {
				v, _ := strconv.ParseInt(value, 10, 64)
				param.Schema.MinLength = &v
			}
		case SchemaMaxLenKey:
			if getType(param.Schema) == TypeArray {
				v, _ := strconv.ParseInt(value, 10, 64)
				param.Schema.MaxLength = &v
			}
		case SchemaEnumKey:
			// converted after the loop, once type/format are fully known
			enumValues = strings.Split(value, ",")
		case SchemaFormatKey:
			param.Schema.Format = value
		case SchemaDefaultKey:
			param.Schema.Default = convert(param.Type, value)
		}
	}

	if param.Description != "" {
		param.Schema.Description = param.Description
	}

	convertEnum(param.Schema, enumValues)
}
+
+func convertEnum(schema *spec.Schema, enumValues []string) {
+ if len(enumValues) == 0 {
+ return
+ }
+
+ var finalEnum []interface{}
+ for _, v := range enumValues {
+ finalEnum = append(finalEnum, convert(schema.Type[0], strings.TrimSpace(v)))
+ }
+ schema.Enum = finalEnum
+}
+
+func convert(typeStr, valueStr string) interface{} {
+ switch typeStr {
+ case TypeInteger:
+ fallthrough
+ case TypeNumber:
+ if num, err := strconv.ParseFloat(valueStr, 64); err == nil {
+ return num
+ }
+ case TypeBoolean:
+ fallthrough
+ case TypeBool:
+ if b, err := strconv.ParseBool(valueStr); err == nil {
+ return b
+ }
+ }
+ return valueStr
+}
+
+func getType(schema *spec.Schema) string {
+ if len(schema.Type) == 0 {
+ return ""
+ }
+ return schema.Type[0]
+}
+
// contains reports whether obj occurs as an element of arr.
func contains(arr []string, obj string) bool {
	for i := range arr {
		if arr[i] == obj {
			return true
		}
	}
	return false
}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/routes.go b/vendor/github.com/go-swagger/go-swagger/codescan/routes.go
new file mode 100644
index 000000000..af58e43f3
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/routes.go
@@ -0,0 +1,93 @@
+package codescan
+
+import (
+ "fmt"
+
+ "github.com/go-openapi/spec"
+)
+
+func opConsumesSetter(op *spec.Operation) func([]string) {
+ return func(consumes []string) { op.Consumes = consumes }
+}
+
+func opProducesSetter(op *spec.Operation) func([]string) {
+ return func(produces []string) { op.Produces = produces }
+}
+
+func opSchemeSetter(op *spec.Operation) func([]string) {
+ return func(schemes []string) { op.Schemes = schemes }
+}
+
+func opSecurityDefsSetter(op *spec.Operation) func([]map[string][]string) {
+ return func(securityDefs []map[string][]string) { op.Security = securityDefs }
+}
+
+func opResponsesSetter(op *spec.Operation) func(*spec.Response, map[int]spec.Response) {
+ return func(def *spec.Response, scr map[int]spec.Response) {
+ if op.Responses == nil {
+ op.Responses = new(spec.Responses)
+ }
+ op.Responses.Default = def
+ op.Responses.StatusCodeResponses = scr
+ }
+}
+
+func opParamSetter(op *spec.Operation) func([]*spec.Parameter) {
+ return func(params []*spec.Parameter) {
+ for _, v := range params {
+ op.AddParam(v)
+ }
+ }
+}
+
+func opExtensionsSetter(op *spec.Operation) func(*spec.Extensions) {
+ return func(exts *spec.Extensions) {
+ for name, value := range *exts {
+ op.AddExtension(name, value)
+ }
+ }
+}
+
// routesBuilder turns a single parsed swagger:route comment into a
// spec.Operation attached to the right path item.
type routesBuilder struct {
	ctx         *scanCtx               // shared scan state
	route       parsedPathContent      // method, path, id, tags and remaining comment lines
	definitions map[string]spec.Schema // known model definitions, consumed by response parsing
	operations  map[string]*spec.Operation
	responses   map[string]spec.Response
	parameters  []*spec.Parameter
}
+
// Build assembles the swagger operation for this parsed route, parses the
// remaining comment lines for its sections, and stores the resulting path
// item under the route's path in tgt.
func (r *routesBuilder) Build(tgt *spec.Paths) error {

	// reading from a nil map yields the zero PathItem, so this is safe even
	// before the nil guard further down
	pthObj := tgt.Paths[r.route.Path]
	op := setPathOperation(
		r.route.Method, r.route.ID,
		&pthObj, r.operations[r.route.ID])

	op.Tags = r.route.Tags

	// wire up a sectioned parser whose taggers write straight into op
	sp := new(sectionedParser)
	sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
	sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
	sr := newSetResponses(r.definitions, r.responses, opResponsesSetter(op))
	spa := newSetParams(r.parameters, opParamSetter(op))
	sp.taggers = []tagParser{
		newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, opConsumesSetter(op)), false),
		newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, opProducesSetter(op)), false),
		newSingleLineTagParser("Schemes", newSetSchemes(opSchemeSetter(op))),
		newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, opSecurityDefsSetter(op)), false),
		newMultiLineTagParser("Parameters", spa, false),
		newMultiLineTagParser("Responses", sr, false),
		newSingleLineTagParser("Deprecated", &setDeprecatedOp{op}),
		newMultiLineTagParser("Extensions", newSetExtensions(opExtensionsSetter(op)), true),
	}
	if err := sp.Parse(r.route.Remaining); err != nil {
		return fmt.Errorf("operation (%s): %v", op.ID, err)
	}

	// allocate the paths map before writing the (possibly new) path item back
	if tgt.Paths == nil {
		tgt.Paths = make(map[string]spec.PathItem)
	}
	tgt.Paths[r.route.Path] = pthObj
	return nil
}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/schema.go b/vendor/github.com/go-swagger/go-swagger/codescan/schema.go
new file mode 100644
index 000000000..8c6723040
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/schema.go
@@ -0,0 +1,1155 @@
+package codescan
+
+import (
+ "encoding/json"
+ "fmt"
+ "go/ast"
+ "go/importer"
+ "go/types"
+ "log"
+ "os"
+ "reflect"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/ast/astutil"
+
+ "github.com/go-openapi/spec"
+ "github.com/pkg/errors"
+)
+
+func addExtension(ve *spec.VendorExtensible, key string, value interface{}) {
+ if os.Getenv("SWAGGER_GENERATE_EXTENSION") == "false" {
+ return
+ }
+
+ ve.AddExtension(key, value)
+}
+
// schemaTypable adapts a *spec.Schema to the swaggerTypable interface,
// tracking the nesting level for array/map element schemas.
type schemaTypable struct {
	schema *spec.Schema
	level  int // nesting depth; incremented for items/additionalProperties
}
+
// Typed sets the schema's type and format.
func (st schemaTypable) Typed(tpe, format string) {
	st.schema.Typed(tpe, format)
}
+
// SetRef points the schema at the given $ref.
func (st schemaTypable) SetRef(ref spec.Ref) {
	st.schema.Ref = ref
}
+
// Schema exposes the underlying spec.Schema being built.
func (st schemaTypable) Schema() *spec.Schema {
	return st.schema
}
+
// Items marks the schema as an array and returns a typable for its element
// schema (allocated on demand), one nesting level deeper.
func (st schemaTypable) Items() swaggerTypable {
	if st.schema.Items == nil {
		st.schema.Items = new(spec.SchemaOrArray)
	}
	if st.schema.Items.Schema == nil {
		st.schema.Items.Schema = new(spec.Schema)
	}

	st.schema.Typed("array", "")
	return schemaTypable{st.schema.Items.Schema, st.level + 1}
}
+
// AdditionalProperties marks the schema as an object and returns a typable
// for its additionalProperties schema (allocated on demand), one nesting
// level deeper. Used for map types.
func (st schemaTypable) AdditionalProperties() swaggerTypable {
	if st.schema.AdditionalProperties == nil {
		st.schema.AdditionalProperties = new(spec.SchemaOrBool)
	}
	if st.schema.AdditionalProperties.Schema == nil {
		st.schema.AdditionalProperties.Schema = new(spec.Schema)
	}

	st.schema.Typed("object", "")
	return schemaTypable{st.schema.AdditionalProperties.Schema, st.level + 1}
}
+
+func (st schemaTypable) Level() int { return st.level }
+
// AddExtension sets a vendor extension on the schema, honoring the global
// SWAGGER_GENERATE_EXTENSION switch.
func (st schemaTypable) AddExtension(key string, value interface{}) {
	addExtension(&st.schema.VendorExtensible, key, value)
}
+
// WithEnum records the allowed values for the schema.
func (st schemaTypable) WithEnum(values ...interface{}) {
	st.schema.WithEnum(values...)
}
+
// WithEnumDescription stores an enum description as a vendor extension;
// empty descriptions are dropped.
func (st schemaTypable) WithEnumDescription(desc string) {
	if desc == "" {
		return
	}
	st.AddExtension(extEnumDesc, desc)
}
+
// schemaValidations adapts a *spec.Schema so validation pragmas parsed from
// doc comments can be written onto it through a uniform setter interface.
type schemaValidations struct {
	current *spec.Schema // the schema receiving the validations
}
+
// SetMaximum records an (optionally exclusive) upper bound on the schema.
func (sv schemaValidations) SetMaximum(val float64, exclusive bool) {
	sv.current.Maximum = &val
	sv.current.ExclusiveMaximum = exclusive
}

// SetMinimum records an (optionally exclusive) lower bound on the schema.
func (sv schemaValidations) SetMinimum(val float64, exclusive bool) {
	sv.current.Minimum = &val
	sv.current.ExclusiveMinimum = exclusive
}

// One-line setters mapping validation pragmas onto spec.Schema fields.
func (sv schemaValidations) SetMultipleOf(val float64)  { sv.current.MultipleOf = &val }
func (sv schemaValidations) SetMinItems(val int64)      { sv.current.MinItems = &val }
func (sv schemaValidations) SetMaxItems(val int64)      { sv.current.MaxItems = &val }
func (sv schemaValidations) SetMinLength(val int64)     { sv.current.MinLength = &val }
func (sv schemaValidations) SetMaxLength(val int64)     { sv.current.MaxLength = &val }
func (sv schemaValidations) SetPattern(val string)      { sv.current.Pattern = val }
func (sv schemaValidations) SetUnique(val bool)         { sv.current.UniqueItems = val }
func (sv schemaValidations) SetDefault(val interface{}) { sv.current.Default = val }
func (sv schemaValidations) SetExample(val interface{}) { sv.current.Example = val }

// SetEnum parses a comma-separated enum list, coercing each entry with the
// schema's current format and type.
// NOTE(review): indexes Type[0] — assumes a type was set before the enum
// pragma is parsed; confirm callers guarantee this.
func (sv schemaValidations) SetEnum(val string) {
	sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Format: sv.current.Format, Type: sv.current.Type[0]})
}
+
// schemaBuilder builds a swagger schema definition from a single Go type
// declaration discovered during the scan.
type schemaBuilder struct {
	ctx        *scanCtx
	decl       *entityDecl // the type declaration being translated
	GoName     string      // Go identifier of the type (cached by inferNames)
	Name       string      // definition name, possibly overridden by a swagger:model annotation
	annotated  bool        // true when a swagger:model annotation was seen
	discovered []*entityDecl
	postDecls  []*entityDecl
}
+
// inferNames resolves and caches the Go name and the swagger definition name
// for this declaration. The definition name defaults to the Go identifier
// and can be overridden by a swagger:model annotation in the doc comments.
// Results are memoized on s.GoName/s.Name (via the deferred assignment).
func (s *schemaBuilder) inferNames() (goName string, name string) {
	// already computed on a previous call
	if s.GoName != "" {
		goName, name = s.GoName, s.Name
		return
	}

	goName = s.decl.Ident.Name
	name = goName
	// cache whatever values we end up with, on every return path below
	defer func() {
		s.GoName = goName
		s.Name = name
	}()
	if s.decl.Comments == nil {
		return
	}

DECLS:
	for _, cmt := range s.decl.Comments.List {
		for _, ln := range strings.Split(cmt.Text, "\n") {
			matches := rxModelOverride.FindStringSubmatch(ln)
			// a bare swagger:model marks the type as annotated even
			// without an explicit name override
			if len(matches) > 0 {
				s.annotated = true
			}
			if len(matches) > 1 && len(matches[1]) > 0 {
				name = matches[1]
				break DECLS
			}
		}
	}
	return
}
+
// Build translates the declaration into a swagger schema and stores it in
// definitions under the inferred definition name, merging into any schema
// already present for that name.
func (s *schemaBuilder) Build(definitions map[string]spec.Schema) error {
	s.inferNames()

	schema := definitions[s.Name]
	err := s.buildFromDecl(s.decl, &schema)
	if err != nil {
		return err
	}
	definitions[s.Name] = schema
	return nil
}
+
// buildFromDecl fills schema from the builder's declaration: it parses the
// doc comment for title/description, then dispatches on the declared type.
// The decl parameter is unused; the method reads s.decl instead.
func (s *schemaBuilder) buildFromDecl(_ *entityDecl, schema *spec.Schema) error {
	// analyze doc comment for the model
	sp := new(sectionedParser)
	sp.setTitle = func(lines []string) { schema.Title = joinDropLast(lines) }
	sp.setDescription = func(lines []string) {
		schema.Description = joinDropLast(lines)
		// append any enum description captured as a vendor extension
		enumDesc := getEnumDesc(schema.VendorExtensible.Extensions)
		if enumDesc != "" {
			schema.Description += "\n" + enumDesc
		}
	}
	if err := sp.Parse(s.decl.Comments); err != nil {
		return err
	}

	// if the type is marked to ignore, just return
	if sp.ignored {
		return nil
	}

	switch tpe := s.decl.Type.Obj().Type().(type) {
	case *types.Basic:
		debugLog("basic: %v", tpe.Name())
	case *types.Struct:
		if err := s.buildFromStruct(s.decl, tpe, schema, make(map[string]string)); err != nil {
			return err
		}
	case *types.Interface:
		if err := s.buildFromInterface(s.decl, tpe, schema, make(map[string]string)); err != nil {
			return err
		}
	case *types.Array:
		debugLog("array: %v -> %v", s.decl.Ident.Name, tpe.Elem().String())
	case *types.Slice:
		debugLog("slice: %v -> %v", s.decl.Ident.Name, tpe.Elem().String())
	case *types.Map:
		debugLog("map: %v -> [%v]%v", s.decl.Ident.Name, tpe.Key().String(), tpe.Elem().String())
	case *types.Named:
		o := tpe.Obj()
		if o != nil {
			debugLog("got the named type object: %s.%s | isAlias: %t | exported: %t", o.Pkg().Path(), o.Name(), o.IsAlias(), o.Exported())
			// time.Time maps directly to the date-time string format
			if o.Pkg().Name() == "time" && o.Name() == "Time" {
				schema.Typed("string", "date-time")
				return nil
			}

			ps := schemaTypable{schema, 0}
			// NOTE(review): this loop only terminates via the two breaks;
			// if ti is neither builtin nor a type it spins forever — confirm
			// those are the only possible outcomes here.
			for {
				ti := s.decl.Pkg.TypesInfo.Types[s.decl.Spec.Type]
				if ti.IsBuiltin() {
					break
				}
				if ti.IsType() {
					if err := s.buildFromType(ti.Type, ps); err != nil {
						return err
					}
					break
				}
			}
		}
	default:
		log.Printf("WARNING: Missing parser for a %T, skipping model: %s\n", tpe, s.Name)
		return nil
	}

	// only decorate non-ref schemas with go-name/go-package extensions
	if schema.Ref.String() == "" {
		if s.Name != s.GoName {
			addExtension(&schema.VendorExtensible, "x-go-name", s.GoName)
		}
		addExtension(&schema.VendorExtensible, "x-go-package", s.decl.Type.Obj().Pkg().Path())
	}
	return nil
}
+
// buildFromTextMarshal types tgt for a value implementing
// encoding.TextMarshaler. Such types marshal to text, so the fallback is a
// plain string; well-known cases (error, uuid, time.Time, json.RawMessage,
// strfmt-annotated types) get more specific types/formats.
func (s *schemaBuilder) buildFromTextMarshal(tpe types.Type, tgt swaggerTypable) error {
	// unwrap pointers first
	if typePtr, ok := tpe.(*types.Pointer); ok {
		return s.buildFromTextMarshal(typePtr.Elem(), tgt)
	}

	typeNamed, ok := tpe.(*types.Named)
	if !ok {
		tgt.Typed("string", "")
		return nil
	}

	tio := typeNamed.Obj()
	// the builtin error type has no package
	if tio.Pkg() == nil && tio.Name() == "error" {
		return swaggerSchemaForType(tio.Name(), tgt)
	}

	// NOTE(review): tio.Pkg() is dereferenced here although only the
	// error case above guarantees it non-nil — confirm other nil-Pkg
	// types cannot reach this point.
	debugLog("named refined type %s.%s", tio.Pkg().Path(), tio.Name())
	pkg, found := s.ctx.PkgForType(tpe)

	// any type literally named "uuid" (case-insensitive) becomes string/uuid
	if strings.ToLower(tio.Name()) == "uuid" {
		tgt.Typed("string", "uuid")
		return nil
	}

	if !found {
		// this must be a builtin
		debugLog("skipping because package is nil: %s", tpe.String())
		return nil
	}
	if pkg.Name == "time" && tio.Name() == "Time" {
		tgt.Typed("string", "date-time")
		return nil
	}
	if pkg.PkgPath == "encoding/json" && tio.Name() == "RawMessage" {
		tgt.Typed("object", "")
		return nil
	}
	cmt, hasComments := s.ctx.FindComments(pkg, tio.Name())
	if !hasComments {
		cmt = new(ast.CommentGroup)
	}

	// a swagger:strfmt annotation overrides the default string typing
	if sfnm, isf := strfmtName(cmt); isf {
		tgt.Typed("string", sfnm)
		return nil
	}

	tgt.Typed("string", "")
	return nil
}
+
// buildFromType types tgt from an arbitrary Go type, dispatching on the
// type's shape. TextMarshaler implementations are handled first; basic,
// pointer, container and named types each get their own branch. Named types
// additionally consult doc-comment annotations (strfmt, type override, enum,
// default, alias) before falling back to refs into the definitions section.
func (s *schemaBuilder) buildFromType(tpe types.Type, tgt swaggerTypable) error {
	pkg, err := importer.Default().Import("encoding")
	if err != nil {
		// NOTE(review): a failed import of "encoding" is silently treated
		// as success here — confirm this is intentional.
		return nil
	}
	ifc := pkg.Scope().Lookup("TextMarshaler").Type().Underlying().(*types.Interface)

	// check if the type implements encoding.TextMarshaler interface
	isTextMarshaler := types.Implements(tpe, ifc)
	if isTextMarshaler {
		return s.buildFromTextMarshal(tpe, tgt)
	}

	switch titpe := tpe.(type) {
	case *types.Basic:
		return swaggerSchemaForType(titpe.String(), tgt)
	case *types.Pointer:
		return s.buildFromType(titpe.Elem(), tgt)
	case *types.Struct:
		return s.buildFromStruct(s.decl, titpe, tgt.Schema(), make(map[string]string))
	case *types.Interface:
		return s.buildFromInterface(s.decl, titpe, tgt.Schema(), make(map[string]string))
	case *types.Slice:
		return s.buildFromType(titpe.Elem(), tgt.Items())
	case *types.Array:
		return s.buildFromType(titpe.Elem(), tgt.Items())
	case *types.Map:
		// debugLog("map: %v -> [%v]%v", fld.Name(), ftpe.Key().String(), ftpe.Elem().String())
		// check if key is a string type, if not print a message
		// and skip the map property. Only maps with string keys can go into additional properties
		sch := tgt.Schema()
		if sch == nil {
			return errors.New("items doesn't support maps")
		}
		eleProp := schemaTypable{sch, tgt.Level()}
		key := titpe.Key()
		isTextMarshaler := types.Implements(key, ifc)
		if key.Underlying().String() == "string" || isTextMarshaler {
			return s.buildFromType(titpe.Elem(), eleProp.AdditionalProperties())
		}
	case *types.Named:
		tio := titpe.Obj()
		// the builtin error type has no package
		if tio.Pkg() == nil && tio.Name() == "error" {
			return swaggerSchemaForType(tio.Name(), tgt)
		}
		debugLog("named refined type %s.%s", tio.Pkg().Path(), tio.Name())
		pkg, found := s.ctx.PkgForType(tpe)
		if !found {
			// this must be a builtin
			debugLog("skipping because package is nil: %s", tpe.String())
			return nil
		}
		if pkg.Name == "time" && tio.Name() == "Time" {
			tgt.Typed("string", "date-time")
			return nil
		}
		if pkg.PkgPath == "encoding/json" && tio.Name() == "RawMessage" {
			tgt.Typed("object", "")
			return nil
		}
		cmt, hasComments := s.ctx.FindComments(pkg, tio.Name())
		if !hasComments {
			cmt = new(ast.CommentGroup)
		}

		// an explicit swagger:type annotation wins over everything else
		if typeName, ok := typeName(cmt); ok {
			_ = swaggerSchemaForType(typeName, tgt)
			return nil
		}

		// otherwise dispatch on the named type's underlying shape
		switch utitpe := tpe.Underlying().(type) {
		case *types.Struct:

			if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok {
				if decl.Type.Obj().Pkg().Path() == "time" && decl.Type.Obj().Name() == "Time" {
					tgt.Typed("string", "date-time")
					return nil
				}
				if sfnm, isf := strfmtName(cmt); isf {
					tgt.Typed("string", sfnm)
					return nil
				}
				if typeName, ok := typeName(cmt); ok {
					_ = swaggerSchemaForType(typeName, tgt)
					return nil
				}

				return s.makeRef(decl, tgt)
			}
		case *types.Interface:
			if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok {
				return s.makeRef(decl, tgt)
			}
		case *types.Basic:
			if sfnm, isf := strfmtName(cmt); isf {
				tgt.Typed("string", sfnm)
				return nil
			}

			// a swagger:enum annotation turns the basic type into an enum
			if enumName, ok := enumName(cmt); ok {
				enumValues, enumDesces, _ := s.ctx.FindEnumValues(pkg, enumName)
				if len(enumValues) > 0 {
					tgt.WithEnum(enumValues...)
					enumTypeName := reflect.TypeOf(enumValues[0]).String()
					_ = swaggerSchemaForType(enumTypeName, tgt)
				}
				if len(enumDesces) > 0 {
					tgt.WithEnumDescription(strings.Join(enumDesces, "\n"))
				}
				return nil
			}

			if defaultName, ok := defaultName(cmt); ok {
				debugLog(defaultName)
				return nil
			}

			if typeName, ok := typeName(cmt); ok {
				_ = swaggerSchemaForType(typeName, tgt)
				return nil

			}

			// aliases of basic types may be flattened to the basic type
			if isAliasParam(tgt) || aliasParam(cmt) {
				err := swaggerSchemaForType(utitpe.Name(), tgt)
				if err == nil {
					return nil
				}
			}
			if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok {
				return s.makeRef(decl, tgt)
			}
			return swaggerSchemaForType(utitpe.String(), tgt)
		case *types.Array:
			if sfnm, isf := strfmtName(cmt); isf {
				// byte and bsonobjectid format the whole array as one string
				if sfnm == "byte" {
					tgt.Typed("string", sfnm)
					return nil
				}
				if sfnm == "bsonobjectid" {
					tgt.Typed("string", sfnm)
					return nil
				}

				tgt.Items().Typed("string", sfnm)
				return nil
			}
			if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok {
				return s.makeRef(decl, tgt)
			}
			return s.buildFromType(utitpe.Elem(), tgt.Items())
		case *types.Slice:
			if sfnm, isf := strfmtName(cmt); isf {
				if sfnm == "byte" {
					tgt.Typed("string", sfnm)
					return nil
				}
				tgt.Items().Typed("string", sfnm)
				return nil
			}
			if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok {
				return s.makeRef(decl, tgt)
			}
			return s.buildFromType(utitpe.Elem(), tgt.Items())
		case *types.Map:
			if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok {
				return s.makeRef(decl, tgt)
			}
			return nil

		default:
			log.Printf("WARNING: can't figure out object type for named type (%T): %v [alias: %t]", tpe.Underlying(), tpe.Underlying(), titpe.Obj().IsAlias())

			return nil
		}
	default:
		panic(fmt.Sprintf("WARNING: can't determine refined type %s (%T)", titpe.String(), titpe))
	}

	return nil
}
+
// buildFromInterface builds schema from an interface type: embedded
// interfaces become either merged properties or allOf members (depending on
// a swagger:allOf annotation), and exported zero-argument single-result
// methods become properties. seen records the property names produced so
// stale properties can be pruned afterwards.
func (s *schemaBuilder) buildFromInterface(decl *entityDecl, it *types.Interface, schema *spec.Schema, seen map[string]string) error {
	if it.Empty() {
		return nil
	}

	var (
		tgt      *spec.Schema
		hasAllOf bool
	)

	// collect the AST fields so annotations on embedded types can be read
	var flist []*ast.Field
	if specType, ok := decl.Spec.Type.(*ast.InterfaceType); ok {
		flist = make([]*ast.Field, it.NumEmbeddeds()+it.NumExplicitMethods())
		copy(flist, specType.Methods.List)
		// for i := range specType.Methods.List {
		// 	flist[i] = specType.Methods.List[i]
		// }
	}

	// First collect the embedded interfaces
	// create refs when the embedded interface is decorated with an allOf annotation
	for i := 0; i < it.NumEmbeddeds(); i++ {
		fld := it.EmbeddedType(i)

		switch ftpe := fld.(type) {
		case *types.Named:
			o := ftpe.Obj()
			// find the AST field matching this embedded type (no names ==> embedded)
			var afld *ast.Field
			for _, an := range flist {
				if len(an.Names) != 0 {
					continue
				}

				tpp := decl.Pkg.TypesInfo.Types[an.Type]
				if tpp.Type.String() != o.Type().String() {
					continue
				}

				// decl.
				debugLog("maybe interface field %s: %s(%T)", o.Name(), o.Type().String(), o.Type())
				afld = an
				break
			}

			if afld == nil {
				debugLog("can't find source associated with %s for %s", fld.String(), it.String())
				continue
			}

			// if the field is annotated with swagger:ignore, ignore it
			if ignored(afld.Doc) {
				continue
			}

			// without swagger:allOf the embedded type's schema is appended
			// as an anonymous allOf member built from its fields
			if !allOfMember(afld.Doc) {
				var newSch spec.Schema
				if err := s.buildEmbedded(o.Type(), &newSch, seen); err != nil {
					return err
				}
				schema.AllOf = append(schema.AllOf, newSch)
				hasAllOf = true
				continue
			}

			hasAllOf = true
			if tgt == nil {
				tgt = &spec.Schema{}
			}
			var newSch spec.Schema
			// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
			// otherwise the fields will just be included as normal properties
			if err := s.buildAllOf(o.Type(), &newSch); err != nil {
				return err
			}
			// an allOf annotation may carry a class name, stored as x-class
			if afld.Doc != nil {
				for _, cmt := range afld.Doc.List {
					for _, ln := range strings.Split(cmt.Text, "\n") {
						matches := rxAllOf.FindStringSubmatch(ln)
						ml := len(matches)
						if ml > 1 {
							mv := matches[ml-1]
							if mv != "" {
								schema.AddExtension("x-class", mv)
							}
						}
					}
				}
			}

			schema.AllOf = append(schema.AllOf, newSch)
		default:
			log.Printf("WARNING: can't figure out object type for allOf named type (%T): %v", ftpe, ftpe.Underlying())
		}
		debugLog("got embedded interface: %s {%T}", fld.String(), fld)
	}

	// with no allOf members, properties go straight onto schema
	if tgt == nil {
		tgt = schema
	}
	// We can finally build the actual schema for the struct
	if tgt.Properties == nil {
		tgt.Properties = make(map[string]spec.Schema)
	}
	tgt.Typed("object", "")

	for i := 0; i < it.NumExplicitMethods(); i++ {
		fld := it.ExplicitMethod(i)
		if !fld.Exported() {
			continue
		}
		// only zero-argument, single-result methods become properties
		sig, isSignature := fld.Type().(*types.Signature)
		if !isSignature {
			continue
		}
		if sig.Params().Len() > 0 {
			continue
		}
		if sig.Results() == nil || sig.Results().Len() != 1 {
			continue
		}

		var afld *ast.Field
		ans, _ := astutil.PathEnclosingInterval(decl.File, fld.Pos(), fld.Pos())
		// debugLog("got %d nodes (exact: %t)", len(ans), isExact)
		for _, an := range ans {
			at, valid := an.(*ast.Field)
			if !valid {
				continue
			}

			debugLog("maybe interface field %s: %s(%T)", fld.Name(), fld.Type().String(), fld.Type())
			afld = at
			break
		}

		if afld == nil {
			debugLog("can't find source associated with %s for %s", fld.String(), it.String())
			continue
		}

		// if the field is annotated with swagger:ignore, ignore it
		if ignored(afld.Doc) {
			continue
		}

		// a swagger:name annotation overrides the method name
		name := fld.Name()
		if afld.Doc != nil {
			for _, cmt := range afld.Doc.List {
				for _, ln := range strings.Split(cmt.Text, "\n") {
					matches := rxName.FindStringSubmatch(ln)
					ml := len(matches)
					if ml > 1 {
						name = matches[ml-1]
					}
				}
			}
		}
		ps := tgt.Properties[name]
		// the property schema is built from the method's result type
		if err := s.buildFromType(sig.Results().At(0).Type(), schemaTypable{&ps, 0}); err != nil {
			return err
		}
		if sfName, isStrfmt := strfmtName(afld.Doc); isStrfmt {
			ps.Typed("string", sfName)
			ps.Ref = spec.Ref{}
			ps.Items = nil
		}

		if err := s.createParser(name, tgt, &ps, afld).Parse(afld.Doc); err != nil {
			return err
		}

		if ps.Ref.String() == "" && name != fld.Name() {
			ps.AddExtension("x-go-name", fld.Name())
		}

		seen[name] = fld.Name()
		tgt.Properties[name] = ps
	}

	// NOTE(review): unreachable — tgt was assigned schema above when nil.
	if tgt == nil {
		return nil
	}
	if hasAllOf && len(tgt.Properties) > 0 {
		schema.AllOf = append(schema.AllOf, *tgt)
	}
	// prune properties no longer produced by the current type
	for k := range tgt.Properties {
		if _, ok := seen[k]; !ok {
			delete(tgt.Properties, k)
		}
	}
	return nil
}
+
// buildFromStruct builds schema from a struct type: anonymous fields are
// merged in (or become allOf members when annotated with swagger:allOf) and
// named exported fields become properties, honoring json tags and doc-comment
// annotations. seen maps produced property names to Go field names so stale
// properties can be pruned afterwards.
func (s *schemaBuilder) buildFromStruct(decl *entityDecl, st *types.Struct, schema *spec.Schema, seen map[string]string) error {
	// NOTE(review): this first FindComments call discards its result and is
	// immediately repeated below — looks redundant; confirm it has no side
	// effect worth keeping.
	s.ctx.FindComments(decl.Pkg, decl.Type.Obj().Name())
	cmt, hasComments := s.ctx.FindComments(decl.Pkg, decl.Type.Obj().Name())
	if !hasComments {
		cmt = new(ast.CommentGroup)
	}
	// an explicit swagger:type annotation short-circuits the whole struct
	if typeName, ok := typeName(cmt); ok {
		_ = swaggerSchemaForType(typeName, schemaTypable{schema: schema})
		return nil
	}
	// First check for all of schemas
	var tgt *spec.Schema
	hasAllOf := false

	for i := 0; i < st.NumFields(); i++ {
		fld := st.Field(i)
		if !fld.Anonymous() {
			debugLog("skipping field %q for allOf scan because not anonymous", fld.Name())
			continue
		}
		tg := st.Tag(i)

		debugLog("maybe allof field(%t) %s: %s (%T) [%q](anon: %t, embedded: %t)", fld.IsField(), fld.Name(), fld.Type().String(), fld.Type(), tg, fld.Anonymous(), fld.Embedded())
		// locate the AST field for this embedded member so its doc comments
		// and tags can be inspected
		var afld *ast.Field
		ans, _ := astutil.PathEnclosingInterval(decl.File, fld.Pos(), fld.Pos())
		// debugLog("got %d nodes (exact: %t)", len(ans), isExact)
		for _, an := range ans {
			at, valid := an.(*ast.Field)
			if !valid {
				continue
			}

			debugLog("maybe allof field %s: %s(%T) [%q]", fld.Name(), fld.Type().String(), fld.Type(), tg)
			afld = at
			break
		}

		if afld == nil {
			debugLog("can't find source associated with %s for %s", fld.String(), st.String())
			continue
		}

		// if the field is annotated with swagger:ignore, ignore it
		if ignored(afld.Doc) {
			continue
		}

		_, ignore, _, err := parseJSONTag(afld)
		if err != nil {
			return err
		}
		if ignore {
			continue
		}

		// without swagger:allOf, the embedded type's fields are merged
		// directly into the target schema
		if !allOfMember(afld.Doc) {
			if tgt == nil {
				tgt = schema
			}
			if err := s.buildEmbedded(fld.Type(), tgt, seen); err != nil {
				return err
			}
			continue
		}
		// if this created an allOf property then we have to rejig the schema var
		// because all the fields collected that aren't from embedded structs should go in
		// their own proper schema
		// first process embedded structs in order of embedding
		hasAllOf = true
		if tgt == nil {
			tgt = &spec.Schema{}
		}
		var newSch spec.Schema
		// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
		// otherwise the fields will just be included as normal properties
		if err := s.buildAllOf(fld.Type(), &newSch); err != nil {
			return err
		}

		// an allOf annotation may carry a class name, stored as x-class
		if afld.Doc != nil {
			for _, cmt := range afld.Doc.List {
				for _, ln := range strings.Split(cmt.Text, "\n") {
					matches := rxAllOf.FindStringSubmatch(ln)
					ml := len(matches)
					if ml > 1 {
						mv := matches[ml-1]
						if mv != "" {
							schema.AddExtension("x-class", mv)
						}
					}
				}
			}
		}

		schema.AllOf = append(schema.AllOf, newSch)
	}

	if tgt == nil {
		if schema != nil {
			tgt = schema
		} else {
			tgt = &spec.Schema{}
		}
	}
	// We can finally build the actual schema for the struct
	if tgt.Properties == nil {
		tgt.Properties = make(map[string]spec.Schema)
	}
	tgt.Typed("object", "")

	for i := 0; i < st.NumFields(); i++ {
		fld := st.Field(i)
		tg := st.Tag(i)

		// embedded fields were already handled in the allOf pass above
		if fld.Embedded() {
			continue
		}

		if !fld.Exported() {
			debugLog("skipping field %s because it's not exported", fld.Name())
			continue
		}

		var afld *ast.Field
		ans, _ := astutil.PathEnclosingInterval(decl.File, fld.Pos(), fld.Pos())
		// debugLog("got %d nodes (exact: %t)", len(ans), isExact)
		for _, an := range ans {
			at, valid := an.(*ast.Field)
			if !valid {
				continue
			}

			debugLog("field %s: %s(%T) [%q] ==> %s", fld.Name(), fld.Type().String(), fld.Type(), tg, at.Doc.Text())
			afld = at
			break
		}

		if afld == nil {
			debugLog("can't find source associated with %s", fld.String())
			continue
		}

		// if the field is annotated with swagger:ignore, ignore it
		if ignored(afld.Doc) {
			continue
		}

		name, ignore, isString, err := parseJSONTag(afld)
		if err != nil {
			return err
		}
		if ignore {
			// a json:"-" tag removes any property previously produced for
			// this Go field (e.g. by an embedded type)
			for seenTagName, seenFieldName := range seen {
				if seenFieldName == fld.Name() {
					delete(tgt.Properties, seenTagName)
					break
				}
			}
			continue
		}

		ps := tgt.Properties[name]
		if err = s.buildFromType(fld.Type(), schemaTypable{&ps, 0}); err != nil {
			return err
		}
		// the json ",string" option forces string typing
		if isString {
			ps.Typed("string", ps.Format)
			ps.Ref = spec.Ref{}
			ps.Items = nil
		}
		if sfName, isStrfmt := strfmtName(afld.Doc); isStrfmt {
			ps.Typed("string", sfName)
			ps.Ref = spec.Ref{}
			ps.Items = nil
		}

		if err = s.createParser(name, tgt, &ps, afld).Parse(afld.Doc); err != nil {
			return err
		}

		if ps.Ref.String() == "" && name != fld.Name() {
			addExtension(&ps.VendorExtensible, "x-go-name", fld.Name())
		}

		// we have 2 cases:
		// 1. field with different name override tag
		// 2. field with different name removes tag
		// so we need to save both tag&name
		seen[name] = fld.Name()
		tgt.Properties[name] = ps
	}

	// NOTE(review): unreachable — tgt was assigned above when nil.
	if tgt == nil {
		return nil
	}
	if hasAllOf && len(tgt.Properties) > 0 {
		schema.AllOf = append(schema.AllOf, *tgt)
	}
	// prune properties no longer produced by the current type
	for k := range tgt.Properties {
		if _, ok := seen[k]; !ok {
			delete(tgt.Properties, k)
		}
	}
	return nil
}
+
+// buildAllOf renders one member of a swagger allOf composition into schema.
+// Pointers are dereferenced first; a named struct or interface then becomes
+// a string format (time.Time or a swagger:strfmt annotation), a $ref (a
+// swagger:model annotation), or is expanded inline, in that order.
+func (s *schemaBuilder) buildAllOf(tpe types.Type, schema *spec.Schema) error {
+	debugLog("allOf %s", tpe.Underlying())
+	switch ftpe := tpe.(type) {
+	case *types.Pointer:
+		// *T contributes the same allOf member as T
+		return s.buildAllOf(ftpe.Elem(), schema)
+	case *types.Named:
+		switch utpe := ftpe.Underlying().(type) {
+		case *types.Struct:
+			decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name())
+			if found {
+				// time.Time is special-cased as the date-time string format
+				if ftpe.Obj().Pkg().Path() == "time" && ftpe.Obj().Name() == "Time" {
+					schema.Typed("string", "date-time")
+					return nil
+				}
+				// an explicit swagger:strfmt annotation wins over expansion
+				if sfnm, isf := strfmtName(decl.Comments); isf {
+					schema.Typed("string", sfnm)
+					return nil
+				}
+				// annotated models are referenced rather than inlined
+				if decl.HasModelAnnotation() {
+					return s.makeRef(decl, schemaTypable{schema, 0})
+				}
+				return s.buildFromStruct(decl, utpe, schema, make(map[string]string))
+			}
+			return errors.Errorf("can't find source file for struct: %s", ftpe.String())
+		case *types.Interface:
+			decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name())
+			if found {
+				if sfnm, isf := strfmtName(decl.Comments); isf {
+					schema.Typed("string", sfnm)
+					return nil
+				}
+				if decl.HasModelAnnotation() {
+					return s.makeRef(decl, schemaTypable{schema, 0})
+				}
+				return s.buildFromInterface(decl, utpe, schema, make(map[string]string))
+			}
+			return errors.Errorf("can't find source file for interface: %s", ftpe.String())
+		default:
+			log.Printf("WARNING: can't figure out object type for allOf named type (%T): %v", ftpe, ftpe.Underlying())
+			return fmt.Errorf("unable to locate source file for allOf %s", utpe.String())
+		}
+	default:
+		log.Printf("WARNING: Missing allOf parser for a %T, skipping field", ftpe)
+		return fmt.Errorf("unable to resolve allOf member for: %v", ftpe)
+	}
+}
+
+// buildEmbedded expands an embedded struct or interface field into the
+// enclosing schema: the embedded type's members are merged into schema
+// rather than referenced. seen carries the json-tag -> field-name pairs
+// already emitted so overridden fields are not duplicated.
+func (s *schemaBuilder) buildEmbedded(tpe types.Type, schema *spec.Schema, seen map[string]string) error {
+	debugLog("embedded %s", tpe.Underlying())
+	switch ftpe := tpe.(type) {
+	case *types.Pointer:
+		// an embedded *T behaves like an embedded T
+		return s.buildEmbedded(ftpe.Elem(), schema, seen)
+	case *types.Named:
+		debugLog("embedded named type: %T", ftpe.Underlying())
+		switch utpe := ftpe.Underlying().(type) {
+		case *types.Struct:
+			decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name())
+			if found {
+				return s.buildFromStruct(decl, utpe, schema, seen)
+			}
+			return errors.Errorf("can't find source file for struct: %s", ftpe.String())
+		case *types.Interface:
+			decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name())
+			if found {
+				return s.buildFromInterface(decl, utpe, schema, seen)
+			}
+			// BUG FIX: this branch handles interfaces, but the message said
+			// "struct" (copy/paste from the struct case above)
+			return errors.Errorf("can't find source file for interface: %s", ftpe.String())
+		default:
+			log.Printf("WARNING: can't figure out object type for embedded named type (%T): %v", ftpe, ftpe.Underlying())
+		}
+	default:
+		log.Printf("WARNING: Missing embedded parser for a %T, skipping model\n", ftpe)
+		return nil
+	}
+	return nil
+}
</doc_update>
+
+// makeRef points prop at the "#/definitions" entry for decl and queues decl
+// so the corresponding definition gets generated later.
+func (s *schemaBuilder) makeRef(decl *entityDecl, prop swaggerTypable) error {
+	name, _ := decl.Names()
+	ref, err := spec.NewRef("#/definitions/" + name)
+	if err != nil {
+		return err
+	}
+	prop.SetRef(ref)
+	s.postDecls = append(s.postDecls, decl)
+	return nil
+}
+
+// createParser wires up the sectionedParser that extracts swagger validation
+// annotations (maximum, minLength, enum, nested items validations, ...) from
+// the doc comment of field fld and applies them to the property schema ps.
+// schema is the enclosing struct schema, targeted by the required and
+// discriminator annotations. nm is the serialized property name.
+// NOTE(review): when marshalling the property type fails this returns nil
+// and the call site immediately invokes Parse on the result;
+// spec.StringOrArray marshalling is not expected to fail — confirm upstream.
+func (s *schemaBuilder) createParser(nm string, schema, ps *spec.Schema, fld *ast.Field) *sectionedParser {
+	sp := new(sectionedParser)
+
+	schemeType, err := ps.Type.MarshalJSON()
+	if err != nil {
+		return nil
+	}
+
+	if ps.Ref.String() == "" {
+		sp.setDescription = func(lines []string) {
+			ps.Description = joinDropLast(lines)
+			enumDesc := getEnumDesc(ps.VendorExtensible.Extensions)
+			if enumDesc != "" {
+				ps.Description += "\n" + enumDesc
+			}
+		}
+		sp.taggers = []tagParser{
+			newSingleLineTagParser("maximum", &setMaximum{schemaValidations{ps}, rxf(rxMaximumFmt, "")}),
+			newSingleLineTagParser("minimum", &setMinimum{schemaValidations{ps}, rxf(rxMinimumFmt, "")}),
+			newSingleLineTagParser("multipleOf", &setMultipleOf{schemaValidations{ps}, rxf(rxMultipleOfFmt, "")}),
+			newSingleLineTagParser("minLength", &setMinLength{schemaValidations{ps}, rxf(rxMinLengthFmt, "")}),
+			newSingleLineTagParser("maxLength", &setMaxLength{schemaValidations{ps}, rxf(rxMaxLengthFmt, "")}),
+			newSingleLineTagParser("pattern", &setPattern{schemaValidations{ps}, rxf(rxPatternFmt, "")}),
+			newSingleLineTagParser("minItems", &setMinItems{schemaValidations{ps}, rxf(rxMinItemsFmt, "")}),
+			newSingleLineTagParser("maxItems", &setMaxItems{schemaValidations{ps}, rxf(rxMaxItemsFmt, "")}),
+			newSingleLineTagParser("unique", &setUnique{schemaValidations{ps}, rxf(rxUniqueFmt, "")}),
+			newSingleLineTagParser("enum", &setEnum{schemaValidations{ps}, rxf(rxEnumFmt, "")}),
+			newSingleLineTagParser("default", &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxDefaultFmt, "")}),
+			newSingleLineTagParser("type", &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxDefaultFmt, "")}),
+			newSingleLineTagParser("example", &setExample{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxExampleFmt, "")}),
+			newSingleLineTagParser("required", &setRequiredSchema{schema, nm}),
+			newSingleLineTagParser("readOnly", &setReadOnlySchema{ps}),
+			newSingleLineTagParser("discriminator", &setDiscriminator{schema, nm}),
+			newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, schemaVendorExtensibleSetter(ps)), true),
+		}
+
+		// itemsTaggers produces the validation parsers for the items schema
+		// at the given nesting level
+		itemsTaggers := func(items *spec.Schema, level int) []tagParser {
+			schemeType, err := items.Type.MarshalJSON()
+			if err != nil {
+				return nil
+			}
+			// the expression is 1-index based not 0-index
+			itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
+			return []tagParser{
+				newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{schemaValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{schemaValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{schemaValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{schemaValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{schemaValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{schemaValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{schemaValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{schemaValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{schemaValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{schemaValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
+				newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
+			}
+		}
+
+		// parseArrayTypes walks nested array/pointer types, registering
+		// items taggers for every nesting level encountered
+		var parseArrayTypes func(expr ast.Expr, items *spec.SchemaOrArray, level int) ([]tagParser, error)
+		parseArrayTypes = func(expr ast.Expr, items *spec.SchemaOrArray, level int) ([]tagParser, error) {
+			if items == nil || items.Schema == nil {
+				return []tagParser{}, nil
+			}
+			switch iftpe := expr.(type) {
+			case *ast.ArrayType:
+				eleTaggers := itemsTaggers(items.Schema, level)
+				sp.taggers = append(eleTaggers, sp.taggers...)
+				otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Schema.Items, level+1)
+				if err != nil {
+					return nil, err
+				}
+				return otherTaggers, nil
+			case *ast.Ident:
+				taggers := []tagParser{}
+				// only unresolved identifiers (predeclared types) get taggers
+				if iftpe.Obj == nil {
+					taggers = itemsTaggers(items.Schema, level)
+				}
+				otherTaggers, err := parseArrayTypes(expr, items.Schema.Items, level+1)
+				if err != nil {
+					return nil, err
+				}
+				return append(taggers, otherTaggers...), nil
+			case *ast.StarExpr:
+				otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
+				if err != nil {
+					return nil, err
+				}
+				return otherTaggers, nil
+			default:
+				return nil, fmt.Errorf("unknown field type ele for %q", nm)
+			}
+		}
+		// check if this is a primitive, if so parse the validations from the
+		// doc comments of the slice declaration.
+		if ftped, ok := fld.Type.(*ast.ArrayType); ok {
+			taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
+			if err != nil {
+				return sp
+			}
+			sp.taggers = append(taggers, sp.taggers...)
+		}
+
+	} else {
+		// a $ref property only supports the required annotation
+		sp.taggers = []tagParser{
+			newSingleLineTagParser("required", &setRequiredSchema{schema, nm}),
+		}
+	}
+	return sp
+}
+
+// schemaVendorExtensibleSetter returns a callback that decodes a JSON block
+// of vendor extensions and stores it on meta; every key must match the
+// allowed "x-" extension pattern.
+func schemaVendorExtensibleSetter(meta *spec.Schema) func(json.RawMessage) error {
+	return func(raw json.RawMessage) error {
+		var extensions spec.Extensions
+		if err := json.Unmarshal(raw, &extensions); err != nil {
+			return err
+		}
+		for key := range extensions {
+			if !rxAllowedExtensions.MatchString(key) {
+				return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", key)
+			}
+		}
+		meta.Extensions = extensions
+		return nil
+	}
+}
+
+// tagOptions is a parsed struct-tag value: index 0 holds the name, the
+// remaining entries hold the comma-separated options.
+type tagOptions []string
+
+// Contain reports whether option appears among the tag options (the name at
+// index 0 is never treated as an option).
+func (t tagOptions) Contain(option string) bool {
+	if len(t) < 2 {
+		return false
+	}
+	for _, candidate := range t[1:] {
+		if candidate == option {
+			return true
+		}
+	}
+	return false
+}
+
+// Name returns the name part of the tag value.
+func (t tagOptions) Name() string {
+	return t[0]
+}
+
+// parseJSONTag extracts the serialized name of a struct field from its json
+// tag. It also reports whether the field is excluded (`json:"-"`) and
+// whether the ",string" directive applies (scalar-typed fields only).
+func parseJSONTag(field *ast.Field) (name string, ignore bool, isString bool, err error) {
+	if len(field.Names) > 0 {
+		name = field.Names[0].Name
+	}
+	if field.Tag == nil || len(strings.TrimSpace(field.Tag.Value)) == 0 {
+		// no tag at all: fall back to the Go field name
+		return name, false, false, nil
+	}
+
+	unquoted, err := strconv.Unquote(field.Tag.Value)
+	if err != nil {
+		return name, false, false, err
+	}
+	if strings.TrimSpace(unquoted) == "" {
+		return name, false, false, nil
+	}
+
+	parts := tagOptions(strings.Split(reflect.StructTag(unquoted).Get("json"), ","))
+	if parts.Contain("string") {
+		// Need to check if the field type is a scalar. Otherwise, the
+		// ",string" directive doesn't apply.
+		isString = isFieldStringable(field.Type)
+	}
+
+	switch parts.Name() {
+	case "-":
+		return name, true, isString, nil
+	case "":
+		return name, false, isString, nil
+	default:
+		return parts.Name(), false, isString, nil
+	}
+}
+
+// isFieldStringable check if the field type is a scalar. If the field type is
+// *ast.StarExpr and is pointer type, check if it refers to a scalar.
+// Otherwise, the ",string" directive doesn't apply.
+func isFieldStringable(tpe ast.Expr) bool {
+ if ident, ok := tpe.(*ast.Ident); ok {
+ switch ident.Name {
+ case "int", "int8", "int16", "int32", "int64",
+ "uint", "uint8", "uint16", "uint32", "uint64",
+ "float64", "string", "bool":
+ return true
+ }
+ } else if starExpr, ok := tpe.(*ast.StarExpr); ok {
+ return isFieldStringable(starExpr.X)
+ } else {
+ return false
+ }
+ return false
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/spec.go b/vendor/github.com/go-swagger/go-swagger/codescan/spec.go
new file mode 100644
index 000000000..726787c11
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/spec.go
@@ -0,0 +1,258 @@
+package codescan
+
+import (
+ "go/ast"
+
+ "github.com/go-openapi/spec"
+)
+
+// newSpecBuilder initializes a specBuilder around an existing swagger
+// document (a fresh 2.0 document is created when input is nil), ensuring
+// all top-level collections are non-nil before scanning starts. The
+// builder's definitions/responses maps alias the ones on input.
+func newSpecBuilder(input *spec.Swagger, sc *scanCtx, scanModels bool) *specBuilder {
+	if input == nil {
+		input = new(spec.Swagger)
+		input.Swagger = "2.0"
+	}
+
+	if input.Paths == nil {
+		input.Paths = new(spec.Paths)
+	}
+	if input.Definitions == nil {
+		input.Definitions = make(map[string]spec.Schema)
+	}
+	if input.Responses == nil {
+		input.Responses = make(map[string]spec.Response)
+	}
+	if input.Extensions == nil {
+		input.Extensions = make(spec.Extensions)
+	}
+
+	return &specBuilder{
+		ctx:         sc,
+		input:       input,
+		scanModels:  scanModels,
+		operations:  collectOperationsFromInput(input),
+		definitions: input.Definitions,
+		responses:   input.Responses,
+	}
+}
+
+// specBuilder assembles a swagger document from the entities collected by a
+// scan context.
+type specBuilder struct {
+	scanModels  bool                       // when true, swagger:model declarations are rendered
+	input       *spec.Swagger              // the document being filled in
+	ctx         *scanCtx                   // scanned application entities
+	discovered  []*entityDecl              // declarations found while building, pending definition
+	definitions map[string]spec.Schema     // aliases input.Definitions
+	responses   map[string]spec.Response   // aliases input.Responses
+	operations  map[string]*spec.Operation // operations indexed by operation id
+}
+
+// Build runs all the build phases in dependency order (models, then
+// parameters/responses, then the definitions they discovered, then routes,
+// operations and finally document metadata) and returns the completed
+// swagger document.
+func (s *specBuilder) Build() (*spec.Swagger, error) {
+	if err := s.buildModels(); err != nil {
+		return nil, err
+	}
+
+	if err := s.buildParameters(); err != nil {
+		return nil, err
+	}
+
+	// sic: the method name is misspelled in this vendored copy
+	if err := s.buildRespones(); err != nil {
+		return nil, err
+	}
+
+	// build definitions dictionary
+	if err := s.buildDiscovered(); err != nil {
+		return nil, err
+	}
+
+	if err := s.buildRoutes(); err != nil {
+		return nil, err
+	}
+
+	if err := s.buildOperations(); err != nil {
+		return nil, err
+	}
+
+	if err := s.buildMeta(); err != nil {
+		return nil, err
+	}
+
+	// default the version when the scanned meta did not set one
+	if s.input.Swagger == "" {
+		s.input.Swagger = "2.0"
+	}
+
+	return s.input, nil
+}
+
+// buildDiscovered drains the discovered queue: every declaration referenced
+// while building earlier schemas is itself built, repeating until building
+// produces no further discoveries.
+func (s *specBuilder) buildDiscovered() error {
+	for len(s.discovered) > 0 {
+		// snapshot the declarations that still lack a definition
+		var queue []*entityDecl
+		for _, d := range s.discovered {
+			nm, _ := d.Names()
+			if _, done := s.definitions[nm]; !done {
+				queue = append(queue, d)
+			}
+		}
+		// building a schema may append new entries to s.discovered
+		s.discovered = nil
+		for _, decl := range queue {
+			if err := s.buildDiscoveredSchema(decl); err != nil {
+				return err
+			}
+		}
+	}
+
+	return nil
+}
+
+// buildDiscoveredSchema renders a single declaration into the definitions
+// map and queues any further declarations the schema builder encountered.
+func (s *specBuilder) buildDiscoveredSchema(decl *entityDecl) error {
+	builder := &schemaBuilder{
+		ctx:        s.ctx,
+		decl:       decl,
+		discovered: s.discovered,
+	}
+	if err := builder.Build(s.definitions); err != nil {
+		return err
+	}
+	s.discovered = append(s.discovered, builder.postDecls...)
+	return nil
+}
+
+// buildMeta applies every swagger:meta doc comment block to the root
+// swagger object.
+func (s *specBuilder) buildMeta() error {
+	// build swagger object
+	for _, decl := range s.ctx.app.Meta {
+		if err := newMetaParser(s.input).Parse(decl.Comments); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// buildOperations renders the swagger:operation declarations into the paths
+// object, sharing the common operations index.
+func (s *specBuilder) buildOperations() error {
+	for _, op := range s.ctx.app.Operations {
+		builder := &operationsBuilder{
+			ctx:        s.ctx,
+			path:       op,
+			operations: s.operations,
+		}
+		if err := builder.Build(s.input.Paths); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// buildRoutes renders the swagger:route declarations into the paths object;
+// each route builder shares the common response/operation/definition maps.
+func (s *specBuilder) buildRoutes() error {
+	for _, route := range s.ctx.app.Routes {
+		builder := &routesBuilder{
+			ctx:         s.ctx,
+			route:       route,
+			responses:   s.responses,
+			operations:  s.operations,
+			definitions: s.definitions,
+		}
+		if err := builder.Build(s.input.Paths); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// buildRespones builds the responses dictionary from the swagger:response
+// declarations and queues any models they referenced for buildDiscovered.
+// NOTE(review): the name is misspelled (should be buildResponses); it is
+// kept as-is because Build calls it by this name in this vendored copy.
+func (s *specBuilder) buildRespones() error {
+	// build responses dictionary
+	for _, decl := range s.ctx.app.Responses {
+		rb := &responseBuilder{
+			ctx:  s.ctx,
+			decl: decl,
+		}
+		if err := rb.Build(s.responses); err != nil {
+			return err
+		}
+		s.discovered = append(s.discovered, rb.postDecls...)
+	}
+	return nil
+}
+
+// buildParameters builds shared parameters from the swagger:parameters
+// declarations and queues any models they referenced for buildDiscovered.
+func (s *specBuilder) buildParameters() error {
+	for _, decl := range s.ctx.app.Parameters {
+		builder := &parameterBuilder{
+			ctx:  s.ctx,
+			decl: decl,
+		}
+		if err := builder.Build(s.operations); err != nil {
+			return err
+		}
+		s.discovered = append(s.discovered, builder.postDecls...)
+	}
+	return nil
+}
+
+// buildModels renders all swagger:model declarations (no-op when model
+// scanning is disabled), then folds in any extra models that were pulled in.
+func (s *specBuilder) buildModels() error {
+	if !s.scanModels {
+		return nil
+	}
+
+	for _, decl := range s.ctx.app.Models {
+		if err := s.buildDiscoveredSchema(decl); err != nil {
+			return err
+		}
+	}
+
+	return s.joinExtraModels()
+}
+
+// joinExtraModels moves every entry from ExtraModels into Models, builds a
+// schema for each moved declaration, and recurses while building produced
+// yet more extra models.
+func (s *specBuilder) joinExtraModels() error {
+	tmp := make(map[*ast.Ident]*entityDecl, len(s.ctx.app.ExtraModels))
+	for k, v := range s.ctx.app.ExtraModels {
+		tmp[k] = v
+		s.ctx.app.Models[k] = v
+		// deleting while ranging is well-defined in Go; this empties ExtraModels
+		delete(s.ctx.app.ExtraModels, k)
+	}
+
+	// process extra models and see if there is any reference to a new extra one
+	for _, decl := range tmp {
+		if err := s.buildDiscoveredSchema(decl); err != nil {
+			return err
+		}
+	}
+
+	if len(s.ctx.app.ExtraModels) > 0 {
+		return s.joinExtraModels()
+	}
+
+	return nil
+}
+
+// collectOperationsFromInput indexes, by operation id, every operation
+// already present on an input specification so scanned operations can be
+// merged with them.
+func collectOperationsFromInput(input *spec.Swagger) map[string]*spec.Operation {
+	operations := make(map[string]*spec.Operation)
+	if input == nil || input.Paths == nil {
+		return operations
+	}
+	for _, pth := range input.Paths.Paths {
+		// consider every HTTP verb defined on the path item
+		for _, op := range []*spec.Operation{
+			pth.Get, pth.Post, pth.Put, pth.Patch, pth.Delete, pth.Head, pth.Options,
+		} {
+			if op != nil {
+				operations[op.ID] = op
+			}
+		}
+	}
+	return operations
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/.gitignore b/vendor/github.com/go-swagger/go-swagger/generator/.gitignore
new file mode 100644
index 000000000..9ab870da8
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/.gitignore
@@ -0,0 +1 @@
+generated/
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/bindata.go b/vendor/github.com/go-swagger/go-swagger/generator/bindata.go
new file mode 100644
index 000000000..379362734
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/bindata.go
@@ -0,0 +1,40 @@
+package generator
+
+import (
+ "embed"
+ "io/fs"
+)
+
+// _bindata holds the embedded generator templates. The go:embed directive
+// must stay immediately adjacent to the variable declaration.
+//
+//go:embed templates
+var _bindata embed.FS
+
+// AssetNames returns the names of the assets.
+// Only files are reported: directory entries (including the "templates"
+// root itself) are not asset names and cannot be loaded with Asset.
+func AssetNames() []string {
+	names := make([]string, 0)
+	_ = fs.WalkDir(_bindata, "templates", func(path string, d fs.DirEntry, err error) error {
+		if err != nil {
+			return err
+		}
+		// BUG FIX: the walk previously collected directory entries as well,
+		// so the returned names contained paths Asset() cannot read.
+		if d.IsDir() {
+			return nil
+		}
+		names = append(names, path)
+		return nil
+	})
+	return names
+}
+
+// Asset loads and returns the asset for the given name
+// (a slash-separated path under "templates", as reported by AssetNames).
+// It returns an error if the asset could not be found or
+// could not be loaded.
+func Asset(name string) ([]byte, error) {
+	return _bindata.ReadFile(name)
+}
+
+// MustAsset is like Asset but panics when Asset would return an error.
+// It simplifies safe initialization of global variables.
+func MustAsset(name string) []byte {
+	data, err := Asset(name)
+	if err != nil {
+		panic("asset: Asset(" + name + "): " + err.Error())
+	}
+	return data
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/client.go b/vendor/github.com/go-swagger/go-swagger/generator/client.go
new file mode 100644
index 000000000..037938e35
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/client.go
@@ -0,0 +1,120 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "errors"
+
+ "github.com/go-openapi/swag"
+)
+
+// GenerateClient generates a client library for a swagger spec document.
+//
+// modelNames and operationIDs filter which models/operations are generated;
+// options are validated and the spec is loaded and analyzed before anything
+// is written. Generating a client with zero selected operations is an error.
+func GenerateClient(name string, modelNames, operationIDs []string, opts *GenOpts) error {
+	if err := opts.CheckOpts(); err != nil {
+		return err
+	}
+
+	if err := opts.setTemplates(); err != nil {
+		return err
+	}
+
+	specDoc, analyzed, err := opts.analyzeSpec()
+	if err != nil {
+		return err
+	}
+
+	models, err := gatherModels(specDoc, modelNames)
+	if err != nil {
+		return err
+	}
+
+	operations := gatherOperations(analyzed, operationIDs)
+	if len(operations) == 0 {
+		return errors.New("no operations were selected")
+	}
+
+	generator := appGenerator{
+		Name:          appNameOrDefault(specDoc, name, defaultClientName),
+		SpecDoc:       specDoc,
+		Analyzed:      analyzed,
+		Models:        models,
+		Operations:    operations,
+		Target:        opts.Target,
+		DumpData:      opts.DumpData,
+		Package:       opts.LanguageOpts.ManglePackageName(opts.ClientPackage, defaultClientTarget),
+		APIPackage:    opts.LanguageOpts.ManglePackagePath(opts.APIPackage, defaultOperationsTarget),
+		ModelsPackage: opts.LanguageOpts.ManglePackagePath(opts.ModelPackage, defaultModelsTarget),
+		ServerPackage: opts.LanguageOpts.ManglePackagePath(opts.ServerPackage, defaultServerTarget),
+		ClientPackage: opts.LanguageOpts.ManglePackagePath(opts.ClientPackage, defaultClientTarget),
+		// for client generation, operations live in the client package
+		OperationsPackage: opts.LanguageOpts.ManglePackagePath(opts.ClientPackage, defaultClientTarget),
+		Principal:         opts.PrincipalAlias(),
+		DefaultScheme:     opts.DefaultScheme,
+		DefaultProduces:   opts.DefaultProduces,
+		DefaultConsumes:   opts.DefaultConsumes,
+		GenOpts:           opts,
+	}
+	generator.Receiver = "o"
+	return (&clientGenerator{generator}).Generate()
+}
+
+// clientGenerator renders a client library (models, operations, operation
+// groups and the support/facade files) from a configured appGenerator.
+type clientGenerator struct {
+	appGenerator
+}
+
+// Generate writes the client code: model definitions (optional), one file
+// per operation plus one per operation group, and the client support files.
+// With DumpData set, the template data is dumped instead of generating code.
+func (c *clientGenerator) Generate() error {
+	app, err := c.makeCodegenApp()
+	if err != nil {
+		return err
+	}
+
+	if c.DumpData {
+		return dumpData(swag.ToDynamicJSON(app))
+	}
+
+	if c.GenOpts.IncludeModel {
+		for _, m := range app.Models {
+			if m.IsStream {
+				// stream models are skipped
+				continue
+			}
+			// copy the loop variable so taking its address is safe
+			mod := m
+			if err := c.GenOpts.renderDefinition(&mod); err != nil {
+				return err
+			}
+		}
+	}
+
+	if c.GenOpts.IncludeHandler {
+		for _, g := range app.OperationGroups {
+			opg := g
+			for _, o := range opg.Operations {
+				op := o
+				if err := c.GenOpts.renderOperation(&op); err != nil {
+					return err
+				}
+			}
+			if err := c.GenOpts.renderOperationGroup(&opg); err != nil {
+				return err
+			}
+		}
+	}
+
+	if c.GenOpts.IncludeSupport {
+		if err := c.GenOpts.renderApplication(&app); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/config.go b/vendor/github.com/go-swagger/go-swagger/generator/config.go
new file mode 100644
index 000000000..2d9413218
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/config.go
@@ -0,0 +1,61 @@
+package generator
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "github.com/spf13/viper"
+)
+
+// LanguageDefinition in the configuration file.
+type LanguageDefinition struct {
+	// Layout describes the sections to generate for this language.
+	Layout SectionOpts `mapstructure:"layout"`
+}
+
+// ConfigureOpts for generation: copies this definition's layout onto opts
+// and defaults the language options to Go when unset.
+func (d *LanguageDefinition) ConfigureOpts(opts *GenOpts) error {
+	opts.Sections = d.Layout
+	if opts.LanguageOpts == nil {
+		opts.LanguageOpts = GoLangOpts()
+	}
+	return nil
+}
+
+// LanguageConfig structure that is obtained from parsing a config file.
+// It maps a language name to its layout definition.
+type LanguageConfig map[string]LanguageDefinition
+
+// ReadConfig at the specified path, when no path is specified it will look into
+// the current directory and load a .swagger.{yml,json,hcl,toml,properties} file
+// Returns a viper config or an error
+func ReadConfig(fpath string) (*viper.Viper, error) {
+	v := viper.New()
+	if fpath != "" {
+		if !fileExists(fpath, "") {
+			return nil, fmt.Errorf("can't find file for %q", fpath)
+		}
+		file, err := os.Open(fpath)
+		if err != nil {
+			return nil, err
+		}
+		defer func() { _ = file.Close() }()
+		// the config type is derived from the file extension, minus the dot
+		ext := filepath.Ext(fpath)
+		if len(ext) > 0 {
+			ext = ext[1:]
+		}
+		v.SetConfigType(ext)
+		if err := v.ReadConfig(file); err != nil {
+			return nil, err
+		}
+		return v, nil
+	}
+
+	// no explicit path: search for a .swagger.* file in the current directory
+	v.SetConfigName(".swagger")
+	v.AddConfigPath(".")
+	if err := v.ReadInConfig(); err != nil {
+		// errors are only surfaced when a config file was actually used and
+		// the failure is not an unsupported-format error
+		if _, ok := err.(viper.UnsupportedConfigError); !ok && v.ConfigFileUsed() != "" {
+			return nil, err
+		}
+	}
+	return v, nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/debug.go b/vendor/github.com/go-swagger/go-swagger/generator/debug.go
new file mode 100644
index 000000000..61b4b8d48
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/debug.go
@@ -0,0 +1,64 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "encoding/json"
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+ "runtime"
+)
+
+var (
+	// Debug is true when the env var DEBUG or SWAGGER_DEBUG is not empty:
+	// the generators will then be very noisy about what they are doing.
+	Debug = os.Getenv("DEBUG") != "" || os.Getenv("SWAGGER_DEBUG") != ""
+	// generatorLogger is a debug logger for this package, set up by
+	// debugOptions.
+	generatorLogger *log.Logger
+)
+
+// debugOptions initializes the package debug logger; it must run before the
+// first debugLog/debugLogAsJSON call when Debug is enabled.
+func debugOptions() {
+	generatorLogger = log.New(os.Stdout, "generator:", log.LstdFlags)
+}
+
+// debugLog wraps log.Printf with a debug-specific logger, tagging each
+// message with the caller's file name and line number.
+// NOTE(review): assumes debugOptions has initialized generatorLogger
+// whenever Debug is true — confirm init ordering elsewhere in the package.
+func debugLog(frmt string, args ...interface{}) {
+	if Debug {
+		_, file, pos, _ := runtime.Caller(1)
+		generatorLogger.Printf("%s:%d: %s", filepath.Base(file), pos,
+			fmt.Sprintf(frmt, args...))
+	}
+}
+
+// debugLogAsJSON logs like debugLog but additionally pretty-prints the LAST
+// argument as indented JSON appended to the message.
+func debugLogAsJSON(frmt string, args ...interface{}) {
+	if Debug {
+		var dfrmt string
+		_, file, pos, _ := runtime.Caller(1)
+		dargs := make([]interface{}, 0, len(args)+2)
+		dargs = append(dargs, filepath.Base(file), pos)
+		if len(args) > 0 {
+			dfrmt = "%s:%d: " + frmt + "\n%s"
+			// marshal errors are deliberately ignored: this is debug output
+			bbb, _ := json.MarshalIndent(args[len(args)-1], "", " ")
+			dargs = append(dargs, args[0:len(args)-1]...)
+			dargs = append(dargs, string(bbb))
+		} else {
+			dfrmt = "%s:%d: " + frmt
+		}
+		generatorLogger.Printf(dfrmt, dargs...)
+	}
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/discriminators.go b/vendor/github.com/go-swagger/go-swagger/generator/discriminators.go
new file mode 100644
index 000000000..244a2ba56
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/discriminators.go
@@ -0,0 +1,75 @@
+package generator
+
+import (
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+// discInfo holds both sides of the discriminator relationships found in a
+// spec document.
+type discInfo struct {
+	Discriminators map[string]discor // base types, keyed by schema $ref
+	Discriminated  map[string]discee // subtypes, keyed by schema $ref
+}
+
+// discor describes a base type: a schema that declares a discriminator
+// field.
+type discor struct {
+	FieldName string   `json:"fieldName"` // the discriminator property name
+	GoType    string   `json:"goType"`    // generated Go type name (x-go-name or derived)
+	JSONName  string   `json:"jsonName"`  // definition name in the spec
+	Children  []discee `json:"children"`  // subtypes discriminated by this base type
+}
+
+// discee describes a subtype: a schema whose allOf references a
+// discriminating base type.
+type discee struct {
+	FieldName  string   `json:"fieldName"`  // discriminator property inherited from the base type
+	FieldValue string   `json:"fieldValue"` // value identifying this subtype (x-class or definition name)
+	GoType     string   `json:"goType"`     // generated Go type name (x-go-name or derived)
+	JSONName   string   `json:"jsonName"`   // definition name in the spec
+	Ref        spec.Ref `json:"ref"`        // $ref of this schema
+	ParentRef  spec.Ref `json:"parentRef"`  // $ref of the discriminating base type
+}
+
+// discriminatorInfo indexes the discriminator relationships in a spec: base
+// types (schemas declaring a discriminator) and their subtypes (schemas
+// whose allOf references a base type).
+func discriminatorInfo(doc *analysis.Spec) *discInfo {
+	// first pass: collect every schema that declares a discriminator
+	baseTypes := make(map[string]discor)
+	for _, sch := range doc.AllDefinitions() {
+		if sch.Schema.Discriminator != "" {
+			tpe, _ := sch.Schema.Extensions.GetString(xGoName)
+			if tpe == "" {
+				tpe = swag.ToGoName(sch.Name)
+			}
+			baseTypes[sch.Ref.String()] = discor{
+				FieldName: sch.Schema.Discriminator,
+				GoType:    tpe,
+				JSONName:  sch.Name,
+			}
+		}
+	}
+
+	// second pass: attach every allOf schema referencing a base type as a
+	// child of that base type
+	subTypes := make(map[string]discee)
+	for _, sch := range doc.SchemasWithAllOf() {
+		for _, ao := range sch.Schema.AllOf {
+			if ao.Ref.String() != "" {
+				if bt, ok := baseTypes[ao.Ref.String()]; ok {
+					// the discriminator value defaults to the definition
+					// name unless overridden with x-class
+					name, _ := sch.Schema.Extensions.GetString(xClass)
+					if name == "" {
+						name = sch.Name
+					}
+					tpe, _ := sch.Schema.Extensions.GetString(xGoName)
+					if tpe == "" {
+						tpe = swag.ToGoName(sch.Name)
+					}
+					dce := discee{
+						FieldName:  bt.FieldName,
+						FieldValue: name,
+						Ref:        sch.Ref,
+						ParentRef:  ao.Ref,
+						JSONName:   sch.Name,
+						GoType:     tpe,
+					}
+					subTypes[sch.Ref.String()] = dce
+					bt.Children = append(bt.Children, dce)
+					baseTypes[ao.Ref.String()] = bt
+				}
+			}
+		}
+	}
+	return &discInfo{Discriminators: baseTypes, Discriminated: subTypes}
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/doc.go b/vendor/github.com/go-swagger/go-swagger/generator/doc.go
new file mode 100644
index 000000000..49cbf3950
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/doc.go
@@ -0,0 +1,78 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package generator provides the code generation library for go-swagger.
+
+# Generating data types
+
+The general idea is that you should rarely see interface{} in the generated code.
+You get a complete representation of a swagger document in somewhat idiomatic go.
+
+To do so, there is a set of mapping patterns that are applied,
+to map a Swagger specification to go types:
+
+ definition of primitive => type alias/name
+ definition of array => type alias/name
+ definition of map => type alias/name
+
+ definition of object
+ with properties => struct
+ definition of $ref => type alias/name
+
+ object with only
+ additional properties => map[string]T
+
+ object with additional
+ properties and properties => custom serializer
+
+ schema with schema array
+ in items => tuple (struct with properties, custom serializer)
+
+ schema with all of => struct
+
+ * allOf schema with $ref => embedded value
+ * allOf schema with properties => properties are included in struct
+ * adding an allOf schema with just "x-isnullable": true or
+ "x-nullable": true turns the schema into a pointer when
+ there are only other extension properties provided
+
+NOTE: anyOf and oneOf JSON-schema constructs are not supported by Swagger 2.0
+
+A property on a definition is a pointer when any one of the following conditions is met:
+
+ it is an object schema (struct)
+ it has x-nullable or x-isnullable as vendor extension
+ it is a primitive where the zero value is valid but would fail validation
+ otherwise strings minLength > 0 or required results in non-pointer
+ numbers min > 0, max < 0 and min < max
+
+JSONSchema and by extension Swagger allow for items that have a fixed size array,
+with the schema describing the items at each index. This can be combined with additional items
+to form some kind of tuple with varargs.
+
+To map this to go it creates a struct that has fixed names and a custom json serializer.
+
+NOTE: the additionalItems keyword is not supported by Swagger 2.0. However, the generator and validator parts
+in go-swagger do.
+
+# Documenting the generated code
+
+The code that is generated also gets the doc comments that are used by the scanner
+to generate a spec from go code. So that after generation you should be able to reverse
+generate a spec from the code that was generated by your spec.
+
+It should be equivalent to the original spec but might miss some default values and examples.
+*/
+package generator
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/formats.go b/vendor/github.com/go-swagger/go-swagger/generator/formats.go
new file mode 100644
index 000000000..3d127333f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/formats.go
@@ -0,0 +1,226 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+// TODO: we may probably find a way to register most of this dynamically from strfmt
+
// map of function calls to be generated to get the zero value of a given type
//
// Keys are generated go type names; values are go expressions that evaluate
// to the zero value of that type (presumably rendered by codegen templates —
// confirm against template usage).
var zeroes = map[string]string{
	"bool":    "false",
	"float32": "0",
	"float64": "0",
	"int":     "0",
	"int8":    "0",
	"int16":   "0",
	"int32":   "0",
	"int64":   "0",
	"string":  "\"\"",
	"uint":    "0",
	"uint8":   "0",
	"uint16":  "0",
	"uint32":  "0",
	"uint64":  "0",
	// Extended formats (23 formats corresponding to the Default registry
	// provided by go-openapi/strfmt)
	"strfmt.Base64":     "strfmt.Base64([]byte(nil))",
	"strfmt.CreditCard": "strfmt.CreditCard(\"\")",
	"strfmt.Date":       "strfmt.Date{}",
	"strfmt.DateTime":   "strfmt.DateTime{}",
	"strfmt.Duration":   "strfmt.Duration(0)",
	"strfmt.Email":      "strfmt.Email(\"\")",
	// NOTE: HexColor and RGBColor zero values are valid colors, not empty strings
	"strfmt.HexColor": "strfmt.HexColor(\"#000000\")",
	"strfmt.Hostname": "strfmt.Hostname(\"\")",
	"strfmt.IPv4":     "strfmt.IPv4(\"\")",
	"strfmt.IPv6":     "strfmt.IPv6(\"\")",
	"strfmt.ISBN":     "strfmt.ISBN(\"\")",
	"strfmt.ISBN10":   "strfmt.ISBN10(\"\")",
	"strfmt.ISBN13":   "strfmt.ISBN13(\"\")",
	"strfmt.MAC":      "strfmt.MAC(\"\")",
	"strfmt.ObjectId": "strfmt.ObjectId{}",
	"strfmt.Password": "strfmt.Password(\"\")",
	"strfmt.RGBColor": "strfmt.RGBColor(\"rgb(0,0,0)\")",
	"strfmt.SSN":      "strfmt.SSN(\"\")",
	"strfmt.URI":      "strfmt.URI(\"\")",
	"strfmt.UUID":     "strfmt.UUID(\"\")",
	"strfmt.UUID3":    "strfmt.UUID3(\"\")",
	"strfmt.UUID4":    "strfmt.UUID4(\"\")",
	"strfmt.UUID5":    "strfmt.UUID5(\"\")",
	// "file": "runtime.File",
}
+
// conversion functions from string representation to a numerical or boolean
// primitive type
//
// NOTE: there are no entries for unsized "int"/"uint" or for "string"
// (string needs no conversion).
var stringConverters = map[string]string{
	"bool":    "swag.ConvertBool",
	"float32": "swag.ConvertFloat32",
	"float64": "swag.ConvertFloat64",
	"int8":    "swag.ConvertInt8",
	"int16":   "swag.ConvertInt16",
	"int32":   "swag.ConvertInt32",
	"int64":   "swag.ConvertInt64",
	"uint8":   "swag.ConvertUint8",
	"uint16":  "swag.ConvertUint16",
	"uint32":  "swag.ConvertUint32",
	"uint64":  "swag.ConvertUint64",
}

// formatting (string representation) functions from a native representation
// of a numerical or boolean primitive type
//
// Mirrors stringConverters: same key set, inverse direction.
var stringFormatters = map[string]string{
	"bool":    "swag.FormatBool",
	"float32": "swag.FormatFloat32",
	"float64": "swag.FormatFloat64",
	"int8":    "swag.FormatInt8",
	"int16":   "swag.FormatInt16",
	"int32":   "swag.FormatInt32",
	"int64":   "swag.FormatInt64",
	"uint8":   "swag.FormatUint8",
	"uint16":  "swag.FormatUint16",
	"uint32":  "swag.FormatUint32",
	"uint64":  "swag.FormatUint64",
}
+
// typeMapping contains a mapping of swagger type name to go type
// (format-independent defaults; see formatMapping for format-specific types).
var typeMapping = map[string]string{
	// Standard formats with native, straightforward, mapping
	"string":  "string",
	"boolean": "bool",
	"integer": "int64",
	"number":  "float64",
	// For file producers
	"file": "runtime.File",
}
+
// formatMapping contains a type-specific version of mapping of format to go type,
// keyed first by swagger type, then by format. It refines typeMapping when a
// format is specified in the spec.
var formatMapping = map[string]map[string]string{
	"number": {
		"double": "float64",
		"float":  "float32",
		// integer formats are also accepted under type "number"
		"int":    "int64",
		"int8":   "int8",
		"int16":  "int16",
		"int32":  "int32",
		"int64":  "int64",
		"uint":   "uint64",
		"uint8":  "uint8",
		"uint16": "uint16",
		"uint32": "uint32",
		"uint64": "uint64",
	},
	"integer": {
		// unsized int/uint map to their 64-bit go counterparts
		"int":    "int64",
		"int8":   "int8",
		"int16":  "int16",
		"int32":  "int32",
		"int64":  "int64",
		"uint":   "uint64",
		"uint8":  "uint8",
		"uint16": "uint16",
		"uint32": "uint32",
		"uint64": "uint64",
	},
	"string": {
		"char": "rune",
		// Extended format registry from go-openapi/strfmt.
		// Currently, 23 such formats are supported (default strfmt registry),
		// plus the following aliases:
		// - "datetime" alias for the more official "date-time"
		// - "objectid" and "ObjectId" aliases for "bsonobjectid"
		"binary":       "io.ReadCloser",
		"byte":         "strfmt.Base64",
		"creditcard":   "strfmt.CreditCard",
		"date":         "strfmt.Date",
		"date-time":    "strfmt.DateTime",
		"datetime":     "strfmt.DateTime",
		"duration":     "strfmt.Duration",
		"email":        "strfmt.Email",
		"hexcolor":     "strfmt.HexColor",
		"hostname":     "strfmt.Hostname",
		"ipv4":         "strfmt.IPv4",
		"ipv6":         "strfmt.IPv6",
		"isbn":         "strfmt.ISBN",
		"isbn10":       "strfmt.ISBN10",
		"isbn13":       "strfmt.ISBN13",
		"mac":          "strfmt.MAC",
		"bsonobjectid": "strfmt.ObjectId",
		"objectid":     "strfmt.ObjectId",
		"ObjectId":     "strfmt.ObjectId", // NOTE: does it work with uppercase?
		"password":     "strfmt.Password",
		"rgbcolor":     "strfmt.RGBColor",
		"ssn":          "strfmt.SSN",
		"uri":          "strfmt.URI",
		"uuid":         "strfmt.UUID",
		"uuid3":        "strfmt.UUID3",
		"uuid4":        "strfmt.UUID4",
		"uuid5":        "strfmt.UUID5",
		// For file producers
		"file": "runtime.File",
	},
}
+
// go primitive types
//
// NOTE: this set also includes []byte and rune, which the language does not
// strictly consider primitive but are treated here as such.
var primitives = map[string]struct{}{
	"bool":       {},
	"byte":       {},
	"[]byte":     {},
	"complex64":  {},
	"complex128": {},
	"float32":    {},
	"float64":    {},
	"int":        {},
	"int8":       {},
	"int16":      {},
	"int32":      {},
	"int64":      {},
	"rune":       {},
	"string":     {},
	"uint":       {},
	"uint8":      {},
	"uint16":     {},
	"uint32":     {},
	"uint64":     {},
}
+
// Formats with a custom formatter.
// Currently, 23 such formats are supported (the default go-openapi/strfmt
// registry), plus two io interfaces used for "binary".
var customFormatters = map[string]struct{}{
	"strfmt.Base64":     {},
	"strfmt.CreditCard": {},
	"strfmt.Date":       {},
	"strfmt.DateTime":   {},
	"strfmt.Duration":   {},
	"strfmt.Email":      {},
	"strfmt.HexColor":   {},
	"strfmt.Hostname":   {},
	"strfmt.IPv4":       {},
	"strfmt.IPv6":       {},
	"strfmt.ISBN":       {},
	"strfmt.ISBN10":     {},
	"strfmt.ISBN13":     {},
	"strfmt.MAC":        {},
	"strfmt.ObjectId":   {},
	"strfmt.Password":   {},
	"strfmt.RGBColor":   {},
	"strfmt.SSN":        {},
	"strfmt.URI":        {},
	"strfmt.UUID":       {},
	"strfmt.UUID3":      {},
	"strfmt.UUID4":      {},
	"strfmt.UUID5":      {},
	// the following interfaces do not generate validations
	"io.ReadCloser": {}, // for "format": "binary" (server side)
	"io.Writer":     {}, // for "format": "binary" (client side)
	// NOTE: runtime.File is not a customFormatter
}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/genopts_nonwin.go b/vendor/github.com/go-swagger/go-swagger/generator/genopts_nonwin.go
new file mode 100644
index 000000000..7e2a4f1c0
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/genopts_nonwin.go
@@ -0,0 +1,50 @@
+//go:build !windows
+// +build !windows
+
+package generator
+
import (
	"fmt"
	"log"
	"plugin"
	"text/template"
)
+
// GenOpts holds the options for code generation on non-Windows platforms.
// It extends GenOptsCommon with support for template plugins, which rely on
// the Go "plugin" package (see the windows build variant, which omits this).
type GenOpts struct {
	GenOptsCommon
	TemplatePlugin string // path to a template plugin; empty disables plugin loading
}
+
+func (g *GenOpts) setTemplates() error {
+ if g.TemplatePlugin != "" {
+ if err := g.templates.LoadPlugin(g.TemplatePlugin); err != nil {
+ return err
+ }
+ }
+
+ return g.GenOptsCommon.setTemplates()
+}
+
+// LoadPlugin will load the named plugin and inject its functions into the funcMap
+//
+// The plugin must implement a function matching the signature:
+// `func AddFuncs(f template.FuncMap)`
+// which can add any number of functions to the template repository funcMap.
+// Any existing sprig or go-swagger templates with the same name will be overridden.
+func (t *Repository) LoadPlugin(pluginPath string) error {
+ log.Printf("Attempting to load template plugin: %s", pluginPath)
+
+ p, err := plugin.Open(pluginPath)
+
+ if err != nil {
+ return err
+ }
+
+ f, err := p.Lookup("AddFuncs")
+
+ if err != nil {
+ return err
+ }
+
+ f.(func(template.FuncMap))(t.funcs)
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/genopts_win.go b/vendor/github.com/go-swagger/go-swagger/generator/genopts_win.go
new file mode 100644
index 000000000..6dfbc1b27
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/genopts_win.go
@@ -0,0 +1,12 @@
+//go:build windows
+// +build windows
+
+package generator
+
// GenOpts holds the options for code generation on Windows, where the Go
// "plugin" package is unavailable and template plugins are therefore not
// supported (compare with the non-windows build variant).
type GenOpts struct {
	GenOptsCommon
}

// setTemplates delegates to the common template setup; there is no plugin
// loading step on Windows.
func (g *GenOpts) setTemplates() error {
	return g.GenOptsCommon.setTemplates()
}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/language.go b/vendor/github.com/go-swagger/go-swagger/generator/language.go
new file mode 100644
index 000000000..01c7a318e
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/language.go
@@ -0,0 +1,440 @@
+package generator
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "log"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ goruntime "runtime"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/swag"
+ "golang.org/x/tools/imports"
+)
+
var (
	// DefaultLanguageFunc defines the default generation language
	DefaultLanguageFunc func() *LanguageOpts

	// moduleRe matches the "module" directive of a go.mod file and captures
	// the module path.
	moduleRe *regexp.Regexp
)

// initLanguage sets the package-level defaults: golang as the default target
// language and the compiled go.mod module matcher.
func initLanguage() {
	DefaultLanguageFunc = GoLangOpts

	moduleRe = regexp.MustCompile(`module[ \t]+([^\s]+)`)
}
+
// LanguageOpts to describe a language to the code generator
type LanguageOpts struct {
	// ReservedWords lists language keywords that mangling must avoid.
	ReservedWords        []string
	BaseImportFunc       func(string) string               `json:"-"` // resolves the base import path for a target directory
	ImportsFunc          func(map[string]string) string    `json:"-"` // renders import statements from an alias->path map
	ArrayInitializerFunc func(interface{}) (string, error) `json:"-"` // renders a literal array initializer
	reservedWordsSet     map[string]struct{}               // lookup set built from ReservedWords by Init
	initialized          bool                              // guards Init against repeated initialization
	formatFunc           func(string, []byte) ([]byte, error)
	fileNameFunc         func(string) string // language specific source file naming rules
	dirNameFunc          func(string) string // language specific directory naming rules
}
+
+// Init the language option
+func (l *LanguageOpts) Init() {
+ if l.initialized {
+ return
+ }
+ l.initialized = true
+ l.reservedWordsSet = make(map[string]struct{})
+ for _, rw := range l.ReservedWords {
+ l.reservedWordsSet[rw] = struct{}{}
+ }
+}
+
+// MangleName makes sure a reserved word gets a safe name
+func (l *LanguageOpts) MangleName(name, suffix string) string {
+ if _, ok := l.reservedWordsSet[swag.ToFileName(name)]; !ok {
+ return name
+ }
+ return strings.Join([]string{name, suffix}, "_")
+}
+
+// MangleVarName makes sure a reserved word gets a safe name
+func (l *LanguageOpts) MangleVarName(name string) string {
+ nm := swag.ToVarName(name)
+ if _, ok := l.reservedWordsSet[nm]; !ok {
+ return nm
+ }
+ return nm + "Var"
+}
+
+// MangleFileName makes sure a file name gets a safe name
+func (l *LanguageOpts) MangleFileName(name string) string {
+ if l.fileNameFunc != nil {
+ return l.fileNameFunc(name)
+ }
+ return swag.ToFileName(name)
+}
+
// ManglePackageName makes sure a package gets a safe name.
// In case of a file system path (e.g. name contains "/" or "\" on Windows), this return only the last element.
func (l *LanguageOpts) ManglePackageName(name, suffix string) string {
	if name == "" {
		return suffix
	}
	if l.dirNameFunc != nil {
		// apply language-specific directory naming rules first
		name = l.dirNameFunc(name)
	}
	pth := filepath.ToSlash(filepath.Clean(name)) // preserve path
	pkg := importAlias(pth)                       // drop path
	return l.MangleName(swag.ToFileName(prefixForName(pkg)+pkg), suffix)
}
+
+// ManglePackagePath makes sure a full package path gets a safe name.
+// Only the last part of the path is altered.
+func (l *LanguageOpts) ManglePackagePath(name string, suffix string) string {
+ if name == "" {
+ return suffix
+ }
+ target := filepath.ToSlash(filepath.Clean(name)) // preserve path
+ parts := strings.Split(target, "/")
+ parts[len(parts)-1] = l.ManglePackageName(parts[len(parts)-1], suffix)
+ return strings.Join(parts, "/")
+}
+
+// FormatContent formats a file with a language specific formatter
+func (l *LanguageOpts) FormatContent(name string, content []byte) ([]byte, error) {
+ if l.formatFunc != nil {
+ return l.formatFunc(name, content)
+ }
+ return content, nil
+}
+
+// imports generate the code to import some external packages, possibly aliased
+func (l *LanguageOpts) imports(imports map[string]string) string {
+ if l.ImportsFunc != nil {
+ return l.ImportsFunc(imports)
+ }
+ return ""
+}
+
+// arrayInitializer builds a litteral array
+func (l *LanguageOpts) arrayInitializer(data interface{}) (string, error) {
+ if l.ArrayInitializerFunc != nil {
+ return l.ArrayInitializerFunc(data)
+ }
+ return "", nil
+}
+
// baseImport figures out the base path to generate import statements
func (l *LanguageOpts) baseImport(tgt string) string {
	if l.BaseImportFunc != nil {
		return l.BaseImportFunc(tgt)
	}
	// no BaseImportFunc configured: trace it and fall back to an empty base
	debugLog("base import func is nil")
	return ""
}
+
// GoLangOpts for rendering items as golang code
func GoLangOpts() *LanguageOpts {
	var goOtherReservedSuffixes = map[string]bool{
		// see:
		// https://golang.org/src/go/build/syslist.go
		// https://golang.org/doc/install/source#environment

		// goos
		"aix":       true,
		"android":   true,
		"darwin":    true,
		"dragonfly": true,
		"freebsd":   true,
		"hurd":      true,
		"illumos":   true,
		"js":        true,
		"linux":     true,
		"nacl":      true,
		"netbsd":    true,
		"openbsd":   true,
		"plan9":     true,
		"solaris":   true,
		"windows":   true,
		"zos":       true,

		// arch
		"386":         true,
		"amd64":       true,
		"amd64p32":    true,
		"arm":         true,
		"armbe":       true,
		"arm64":       true,
		"arm64be":     true,
		"mips":        true,
		"mipsle":      true,
		"mips64":      true,
		"mips64le":    true,
		"mips64p32":   true,
		"mips64p32le": true,
		"ppc":         true,
		"ppc64":       true,
		"ppc64le":     true,
		"riscv":       true,
		"riscv64":     true,
		"s390":        true,
		"s390x":       true,
		"sparc":       true,
		"sparc64":     true,
		"wasm":        true,

		// other reserved suffixes
		"test": true,
	}

	opts := new(LanguageOpts)
	// the go language keywords; identifiers colliding with these get mangled
	opts.ReservedWords = []string{
		"break", "default", "func", "interface", "select",
		"case", "defer", "go", "map", "struct",
		"chan", "else", "goto", "package", "switch",
		"const", "fallthrough", "if", "range", "type",
		"continue", "for", "import", "return", "var",
	}

	// generated sources are passed through goimports (fixes imports and
	// formats the output)
	opts.formatFunc = func(ffn string, content []byte) ([]byte, error) {
		// NOTE: the inner "opts" deliberately shadows the outer LanguageOpts
		opts := new(imports.Options)
		opts.TabIndent = true
		opts.TabWidth = 2
		opts.Fragment = true
		opts.Comments = true
		return imports.Process(ffn, content, opts)
	}

	opts.fileNameFunc = func(name string) string {
		// whenever a generated file name ends with a suffix
		// that is meaningful to go build, adds a "swagger"
		// suffix
		parts := strings.Split(swag.ToFileName(name), "_")
		if goOtherReservedSuffixes[parts[len(parts)-1]] {
			// file name ending with a reserved arch or os name
			// are appended an innocuous suffix "swagger"
			parts = append(parts, "swagger")
		}
		return strings.Join(parts, "_")
	}

	opts.dirNameFunc = func(name string) string {
		// whenever a generated directory name is a special
		// golang directory, append an innocuous suffix
		switch name {
		case "vendor", "internal":
			return strings.Join([]string{name, "swagger"}, "_")
		}
		return name
	}

	opts.ImportsFunc = func(imports map[string]string) string {
		if len(imports) == 0 {
			return ""
		}
		result := make([]string, 0, len(imports))
		for k, v := range imports {
			_, name := path.Split(v)
			if name != k {
				// aliased import: the alias differs from the package base name
				result = append(result, fmt.Sprintf("\t%s %q", k, v))
			} else {
				result = append(result, fmt.Sprintf("\t%q", v))
			}
		}
		// sorted for deterministic output across map iteration orders
		sort.Strings(result)
		return strings.Join(result, "\n")
	}

	opts.ArrayInitializerFunc = func(data interface{}) (string, error) {
		// ArrayInitializer constructs a Go literal initializer from interface{} literals.
		// e.g. []interface{}{"a", "b"} is transformed in {"a","b",}
		// e.g. map[string]interface{}{ "a": "x", "b": "y"} is transformed in {"a":"x","b":"y",}.
		//
		// NOTE: this is currently used to construct simple slice initializers for default values.
		// This allows for nicer slice initializers for slices of primitive types and avoid systematic use of json.Unmarshal().
		b, err := json.Marshal(data)
		if err != nil {
			return "", err
		}
		// rewrite JSON brackets/braces into go composite-literal syntax
		return strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(string(b), "}", ",}"), "[", "{"), "]", ",}"), "{,}", "{}"), nil
	}

	opts.BaseImportFunc = func(tgt string) string {
		tgt = filepath.Clean(tgt)
		// On Windows, filepath.Abs("") behaves differently than on Unix.
		// Windows: yields an error, since Abs() does not know the volume.
		// UNIX: returns current working directory
		if tgt == "" {
			tgt = "."
		}
		tgtAbsPath, err := filepath.Abs(tgt)
		if err != nil {
			log.Fatalf("could not evaluate base import path with target \"%s\": %v", tgt, err)
		}

		var tgtAbsPathExtended string
		tgtAbsPathExtended, err = filepath.EvalSymlinks(tgtAbsPath)
		if err != nil {
			log.Fatalf("could not evaluate base import path with target \"%s\" (with symlink resolution): %v", tgtAbsPath, err)
		}

		gopath := os.Getenv("GOPATH")
		if gopath == "" {
			// default GOPATH is $HOME/go when the environment does not set it
			homeDir, herr := os.UserHomeDir()
			if herr != nil {
				log.Fatalln(herr)
			}
			gopath = filepath.Join(homeDir, "go")
		}

		var pth string
		for _, gp := range filepath.SplitList(gopath) {
			// skip GOPATH entries without a src directory
			if _, derr := os.Stat(filepath.Join(gp, "src")); os.IsNotExist(derr) {
				continue
			}
			// EvalSymLinks also calls the Clean
			gopathExtended, er := filepath.EvalSymlinks(gp)
			if er != nil {
				panic(er)
			}
			gopathExtended = filepath.Join(gopathExtended, "src")
			gp = filepath.Join(gp, "src")

			// At this stage we have expanded and unexpanded target path. GOPATH is fully expanded.
			// Expanded means symlink free.
			// We compare both types of targetpath<s> with gopath.
			// If any one of them coincides with gopath , it is imperative that
			// target path lies inside gopath. How?
			// - Case 1: Irrespective of symlinks paths coincide. Both non-expanded paths.
			// - Case 2: Symlink in target path points to location inside GOPATH. (Expanded Target Path)
			// - Case 3: Symlink in target path points to directory outside GOPATH (Unexpanded target path)

			// Case 1: - Do nothing case. If non-expanded paths match just generate base import path as if
			//           there are no symlinks.

			// Case 2: - Symlink in target path points to location inside GOPATH. (Expanded Target Path)
			//           First if will fail. Second if will succeed.

			// Case 3: - Symlink in target path points to directory outside GOPATH (Unexpanded target path)
			//           First if will succeed and break.

			// compares non expanded path for both
			if ok, relativepath := checkPrefixAndFetchRelativePath(tgtAbsPath, gp); ok {
				pth = relativepath
				break
			}

			// Compares non-expanded target path
			if ok, relativepath := checkPrefixAndFetchRelativePath(tgtAbsPath, gopathExtended); ok {
				pth = relativepath
				break
			}

			// Compares expanded target path.
			if ok, relativepath := checkPrefixAndFetchRelativePath(tgtAbsPathExtended, gopathExtended); ok {
				pth = relativepath
				break
			}

		}

		// go modules take precedence over a GOPATH-relative resolution
		mod, goModuleAbsPath, err := tryResolveModule(tgtAbsPath)
		switch {
		case err != nil:
			log.Fatalf("Failed to resolve module using go.mod file: %s", err)
		case mod != "":
			relTgt := relPathToRelGoPath(goModuleAbsPath, tgtAbsPath)
			if !strings.HasSuffix(mod, relTgt) {
				return filepath.ToSlash(mod + relTgt)
			}
			return filepath.ToSlash(mod)
		}

		if pth == "" {
			log.Fatalln("target must reside inside a location in the $GOPATH/src or be a module")
		}
		return filepath.ToSlash(pth)
	}
	opts.Init()
	return opts
}
+
+// resolveGoModFile walks up the directory tree starting from 'dir' until it
+// finds a go.mod file. If go.mod is found it will return the related file
+// object. If no go.mod file is found it will return an error.
+func resolveGoModFile(dir string) (*os.File, string, error) {
+ goModPath := filepath.Join(dir, "go.mod")
+ f, err := os.Open(goModPath)
+ if err != nil {
+ if os.IsNotExist(err) && dir != filepath.Dir(dir) {
+ return resolveGoModFile(filepath.Dir(dir))
+ }
+ return nil, "", err
+ }
+ return f, dir, nil
+}
+
+// relPathToRelGoPath takes a relative os path and returns the relative go
+// package path. For unix nothing will change but for windows \ will be
+// converted to /.
+func relPathToRelGoPath(modAbsPath, absPath string) string {
+ if absPath == "." {
+ return ""
+ }
+
+ path := strings.TrimPrefix(absPath, modAbsPath)
+ pathItems := strings.Split(path, string(filepath.Separator))
+ return strings.Join(pathItems, "/")
+}
+
+func tryResolveModule(baseTargetPath string) (string, string, error) {
+ f, goModAbsPath, err := resolveGoModFile(baseTargetPath)
+ switch {
+ case os.IsNotExist(err):
+ return "", "", nil
+ case err != nil:
+ return "", "", err
+ }
+
+ src, err := io.ReadAll(f)
+ if err != nil {
+ return "", "", err
+ }
+
+ match := moduleRe.FindSubmatch(src)
+ if len(match) != 2 {
+ return "", "", nil
+ }
+
+ return string(match[1]), goModAbsPath, nil
+}
+
+// 1. Checks if the child path and parent path coincide.
+// 2. If they do return child path relative to parent path.
+// 3. Everything else return false
+func checkPrefixAndFetchRelativePath(childpath string, parentpath string) (bool, string) {
+ // Windows (local) file systems - NTFS, as well as FAT and variants
+ // are case insensitive.
+ cp, pp := childpath, parentpath
+ if goruntime.GOOS == "windows" {
+ cp = strings.ToLower(cp)
+ pp = strings.ToLower(pp)
+ }
+
+ if strings.HasPrefix(cp, pp) {
+ pth, err := filepath.Rel(parentpath, childpath)
+ if err != nil {
+ log.Fatalln(err)
+ }
+ return true, pth
+ }
+
+ return false, ""
+
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/media.go b/vendor/github.com/go-swagger/go-swagger/generator/media.go
new file mode 100644
index 000000000..f9dad9fa4
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/media.go
@@ -0,0 +1,191 @@
+package generator
+
+import (
+ "regexp"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/swag"
+)
+
// jsonSerializer is the canonical serializer name for JSON media types.
const jsonSerializer = "json"

// mediaTypeNames maps mime media type patterns to canonical serializer names.
// NOTE(review): map iteration order is random, so overlapping patterns that
// map to different names would resolve non-deterministically — keep new
// entries unambiguous.
var mediaTypeNames = map[*regexp.Regexp]string{
	regexp.MustCompile("application/.*json"):                jsonSerializer,
	regexp.MustCompile("application/.*yaml"):                "yaml",
	regexp.MustCompile("application/.*protobuf"):            "protobuf",
	regexp.MustCompile("application/.*capnproto"):           "capnproto",
	regexp.MustCompile("application/.*thrift"):              "thrift",
	regexp.MustCompile("(?:application|text)/.*xml"):        "xml",
	regexp.MustCompile("text/.*markdown"):                   "markdown",
	regexp.MustCompile("text/.*html"):                       "html",
	regexp.MustCompile("text/.*csv"):                        "csv",
	regexp.MustCompile("text/.*tsv"):                        "tsv",
	regexp.MustCompile("text/.*javascript"):                 "js",
	regexp.MustCompile("text/.*css"):                        "css",
	regexp.MustCompile("text/.*plain"):                      "txt",
	regexp.MustCompile("application/.*octet-stream"):        "bin",
	regexp.MustCompile("application/.*tar"):                 "tar",
	regexp.MustCompile("application/.*gzip"):                "gzip",
	regexp.MustCompile("application/.*gz"):                  "gzip",
	regexp.MustCompile("application/.*raw-stream"):          "bin",
	regexp.MustCompile("application/x-www-form-urlencoded"): "urlform",
	regexp.MustCompile("application/javascript"):            "txt",
	regexp.MustCompile("multipart/form-data"):               "multipartform",
	regexp.MustCompile("image/.*"):                          "bin",
	regexp.MustCompile("audio/.*"):                          "bin",
	regexp.MustCompile("application/pdf"):                   "bin",
}
+
// knownProducers maps serializer names to the go expression instantiating the
// corresponding go-openapi runtime producer.
var knownProducers = map[string]string{
	jsonSerializer:  "runtime.JSONProducer()",
	"yaml":          "yamlpc.YAMLProducer()",
	"xml":           "runtime.XMLProducer()",
	"txt":           "runtime.TextProducer()",
	"bin":           "runtime.ByteStreamProducer()",
	"urlform":       "runtime.DiscardProducer",
	"multipartform": "runtime.DiscardProducer",
}

// knownConsumers mirrors knownProducers for the consumer side.
var knownConsumers = map[string]string{
	jsonSerializer:  "runtime.JSONConsumer()",
	"yaml":          "yamlpc.YAMLConsumer()",
	"xml":           "runtime.XMLConsumer()",
	"txt":           "runtime.TextConsumer()",
	"bin":           "runtime.ByteStreamConsumer()",
	"urlform":       "runtime.DiscardConsumer",
	"multipartform": "runtime.DiscardConsumer",
}
+
+func wellKnownMime(tn string) (string, bool) {
+ for k, v := range mediaTypeNames {
+ if k.MatchString(tn) {
+ return v, true
+ }
+ }
+ return "", false
+}
+
// mediaMime strips any parameters from a media type, keeping only the part
// before the first ";".
func mediaMime(orig string) string {
	if idx := strings.Index(orig, ";"); idx >= 0 {
		return orig[:idx]
	}
	return orig
}
+
// mediaParameters returns the raw parameter section of a media type (whatever
// follows the first ";"), or "" when there are no parameters.
func mediaParameters(orig string) string {
	idx := strings.Index(orig, ";")
	if idx < 0 {
		return ""
	}
	return orig[idx+1:]
}
+
// makeSerializers builds the codegen serializer groups from a list of mime
// media types: serializers are deduplicated by mime type, resolved to a known
// implementation when one exists, then grouped by serializer name. It returns
// the sorted groups and whether JSON is among them. When no media type is
// provided at all, a default JSON serializer is supplied.
func (a *appGenerator) makeSerializers(mediaTypes []string, known func(string) (string, bool)) (GenSerGroups, bool) {
	supportsJSON := false
	uniqueSerializers := make(map[string]*GenSerializer, len(mediaTypes))
	uniqueSerializerGroups := make(map[string]*GenSerGroup, len(mediaTypes))

	// build all required serializers
	for _, media := range mediaTypes {
		key := mediaMime(media)
		nm, ok := wellKnownMime(key)
		if !ok {
			// keep this serializer named, even though its implementation is empty (cf. #1557)
			nm = key
		}
		name := swag.ToJSONName(nm)
		// unknown names yield an empty Implementation string
		impl, _ := known(name)

		ser, ok := uniqueSerializers[key]
		if !ok {
			ser = &GenSerializer{
				AppName:        a.Name,
				ReceiverName:   a.Receiver,
				Name:           name,
				MediaType:      key,
				Implementation: impl,
				Parameters:     []string{},
			}
			uniqueSerializers[key] = ser
		}
		// provide all known parameters (currently unused by codegen templates)
		if params := strings.TrimSpace(mediaParameters(media)); params != "" {
			found := false
			for _, p := range ser.Parameters {
				if params == p {
					found = true
					break
				}
			}
			if !found {
				ser.Parameters = append(ser.Parameters, params)
			}
		}

		// last writer wins: the group entry is simply overwritten for
		// repeated names
		uniqueSerializerGroups[name] = &GenSerGroup{
			GenSerializer: GenSerializer{
				AppName:        a.Name,
				ReceiverName:   a.Receiver,
				Name:           name,
				Implementation: impl,
			},
		}
	}

	if len(uniqueSerializers) == 0 {
		// no media type in the spec: default to JSON
		impl, _ := known(jsonSerializer)
		uniqueSerializers[runtime.JSONMime] = &GenSerializer{
			AppName:        a.Name,
			ReceiverName:   a.Receiver,
			Name:           jsonSerializer,
			MediaType:      runtime.JSONMime,
			Implementation: impl,
			Parameters:     []string{},
		}
		uniqueSerializerGroups[jsonSerializer] = &GenSerGroup{
			GenSerializer: GenSerializer{
				AppName:        a.Name,
				ReceiverName:   a.Receiver,
				Name:           jsonSerializer,
				Implementation: impl,
			},
		}
		supportsJSON = true
	}

	// group serializers by consumer/producer to serve several mime media types
	serializerGroups := make(GenSerGroups, 0, len(uniqueSerializers))

	for _, group := range uniqueSerializerGroups {
		if group.Name == jsonSerializer {
			supportsJSON = true
		}
		serializers := make(GenSerializers, 0, len(uniqueSerializers))
		for _, ser := range uniqueSerializers {
			if group.Name == ser.Name {
				sort.Strings(ser.Parameters)
				serializers = append(serializers, *ser)
			}
		}
		sort.Sort(serializers)
		group.AllSerializers = serializers // provides the full list of mime media types for this serializer group
		serializerGroups = append(serializerGroups, *group)
	}
	// sorted for deterministic generated output
	sort.Sort(serializerGroups)
	return serializerGroups, supportsJSON
}
+
+func (a *appGenerator) makeConsumes() (GenSerGroups, bool) {
+ // builds a codegen struct from all consumes in the spec
+ return a.makeSerializers(a.Analyzed.RequiredConsumes(), func(media string) (string, bool) {
+ c, ok := knownConsumers[media]
+ return c, ok
+ })
+}
+
+func (a *appGenerator) makeProduces() (GenSerGroups, bool) {
+ // builds a codegen struct from all produces in the spec
+ return a.makeSerializers(a.Analyzed.RequiredProduces(), func(media string) (string, bool) {
+ p, ok := knownProducers[media]
+ return p, ok
+ })
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/model.go b/vendor/github.com/go-swagger/go-swagger/generator/model.go
new file mode 100644
index 000000000..132927d48
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/model.go
@@ -0,0 +1,2118 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "errors"
+ "fmt"
+ "log"
+ "path"
+ "path/filepath"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+const asMethod = "()"
+
+/*
+Rewrite specification document first:
+
+* anonymous objects
+* tuples
+* extensible objects (properties + additionalProperties)
+* AllOfs when they match the rewrite criteria (not a nullable allOf)
+
+Find string enums and generate specialized idiomatic enum with them
+
+Every action that happens tracks the path which is a linked list of refs
+
+
+*/
+
+// GenerateModels generates all model files for some schema definitions
+func GenerateModels(modelNames []string, opts *GenOpts) error {
+	// override any default or incompatible options setting
+ opts.IncludeModel = true
+ opts.IgnoreOperations = true
+ opts.ExistingModels = ""
+ opts.IncludeHandler = false
+ opts.IncludeMain = false
+ opts.IncludeSupport = false
+ generator, err := newAppGenerator("", modelNames, nil, opts)
+ if err != nil {
+ return err
+ }
+ return generator.Generate()
+}
+
+// GenerateDefinition generates a single model file for some schema definitions
+func GenerateDefinition(modelNames []string, opts *GenOpts) error {
+ if err := opts.CheckOpts(); err != nil {
+ return err
+ }
+
+ if err := opts.setTemplates(); err != nil {
+ return err
+ }
+
+ specDoc, _, err := opts.analyzeSpec()
+ if err != nil {
+ return err
+ }
+
+ modelNames = pruneEmpty(modelNames)
+ if len(modelNames) == 0 {
+ for k := range specDoc.Spec().Definitions {
+ modelNames = append(modelNames, k)
+ }
+ }
+
+ for _, modelName := range modelNames {
+ // lookup schema
+ model, ok := specDoc.Spec().Definitions[modelName]
+ if !ok {
+ return fmt.Errorf("model %q not found in definitions given by %q", modelName, opts.Spec)
+ }
+
+ // generate files
+ generator := definitionGenerator{
+ Name: modelName,
+ Model: model,
+ SpecDoc: specDoc,
+ Target: filepath.Join(
+ opts.Target,
+ filepath.FromSlash(opts.LanguageOpts.ManglePackagePath(opts.ModelPackage, ""))),
+ opts: opts,
+ }
+
+ if err := generator.Generate(); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+type definitionGenerator struct {
+ Name string
+ Model spec.Schema
+ SpecDoc *loads.Document
+ Target string
+ opts *GenOpts
+}
+
+func (m *definitionGenerator) Generate() error {
+
+ mod, err := makeGenDefinition(m.Name, m.Target, m.Model, m.SpecDoc, m.opts)
+ if err != nil {
+ return fmt.Errorf("could not generate definitions for model %s on target %s: %v", m.Name, m.Target, err)
+ }
+
+ if m.opts.DumpData {
+ return dumpData(swag.ToDynamicJSON(mod))
+ }
+
+ if m.opts.IncludeModel {
+ log.Println("including additional model")
+ if err := m.generateModel(mod); err != nil {
+ return fmt.Errorf("could not generate model: %v", err)
+ }
+ }
+ log.Println("generated model", m.Name)
+
+ return nil
+}
+
+func (m *definitionGenerator) generateModel(g *GenDefinition) error {
+ debugLog("rendering definitions for %+v", *g)
+ return m.opts.renderDefinition(g)
+}
+
+func makeGenDefinition(name, pkg string, schema spec.Schema, specDoc *loads.Document, opts *GenOpts) (*GenDefinition, error) {
+ gd, err := makeGenDefinitionHierarchy(name, pkg, "", schema, specDoc, opts)
+
+ if err == nil && gd != nil {
+ // before yielding the schema to the renderer, we check if the top-level Validate method gets some content
+ // this means that the immediate content of the top level definitions has at least one validation.
+ //
+	// If none is found at this level, and this is not one of the special cases where no Validate() method is exposed at all
+ // (e.g. io.ReadCloser and interface{} types and their aliases), then there is an empty Validate() method which
+ // just return nil (the object abides by the runtime.Validatable interface, but knows it has nothing to validate).
+ //
+ // We do this at the top level because of the possibility of aliased types which always bubble up validation to types which
+ // are referring to them. This results in correct but inelegant code with empty validations.
+ gd.GenSchema.HasValidations = shallowValidationLookup(gd.GenSchema)
+ }
+ return gd, err
+}
+
+func shallowValidationLookup(sch GenSchema) bool {
+ // scan top level need for validations
+ //
+ // NOTE: this supersedes the previous NeedsValidation flag
+	// With the introduction of this shallow lookup, it is no longer necessary
+ // to establish a distinction between HasValidations (e.g. carries on validations)
+ // and NeedsValidation (e.g. should have a Validate method with something in it).
+ // The latter was almost not used anyhow.
+
+ if sch.HasAdditionalProperties && sch.AdditionalProperties == nil {
+ log.Printf("warning: schema for additional properties in schema %q is empty. skipped", sch.Name)
+ }
+
+ if sch.IsArray && sch.HasValidations {
+ return true
+ }
+ if sch.IsStream || sch.IsInterface { // these types have no validation - aliased types on those do not implement the Validatable interface
+ return false
+ }
+ if sch.Required || hasFormatValidation(sch.resolvedType) {
+ return true
+ }
+ if sch.HasStringValidations() || sch.HasNumberValidations() || sch.HasEnum() || len(sch.ItemsEnum) > 0 || sch.HasObjectValidations() {
+ return true
+ }
+ for _, a := range sch.AllOf {
+ if a.HasValidations {
+ return true
+ }
+ }
+ for _, p := range sch.Properties {
+ // Using a base type within another structure triggers validation of the base type.
+ // The discriminator property in the base type definition itself does not.
+ if (p.HasValidations || p.Required) && !(sch.IsBaseType && p.Name == sch.DiscriminatorField) || (p.IsAliased || p.IsComplexObject) && !(p.IsInterface || p.IsStream) {
+ return true
+ }
+ }
+ if sch.IsTuple && (sch.AdditionalItems != nil && (sch.AdditionalItems.HasValidations || sch.AdditionalItems.Required)) {
+ return true
+ }
+ if sch.HasAdditionalProperties && sch.AdditionalProperties != nil && (sch.AdditionalProperties.IsInterface || sch.AdditionalProperties.IsStream) {
+ return false
+ }
+
+ if sch.HasAdditionalProperties && sch.AdditionalProperties != nil && (sch.AdditionalProperties.HasValidations || sch.AdditionalProperties.Required || sch.AdditionalProperties.IsAliased && !(sch.AdditionalProperties.IsInterface || sch.AdditionalProperties.IsStream)) {
+ return true
+ }
+
+ if sch.IsAliased && (sch.IsPrimitive && sch.HasValidations) { // non primitive aliased have either other attributes with validation (above) or shall not validate
+ return true
+ }
+ if sch.HasBaseType || sch.IsSubType {
+ return true
+ }
+ return false
+}
+
+func isExternal(schema spec.Schema) bool {
+ extType, ok := hasExternalType(schema.Extensions)
+ return ok && !extType.Embedded
+}
+
+func makeGenDefinitionHierarchy(name, pkg, container string, schema spec.Schema, specDoc *loads.Document, opts *GenOpts) (*GenDefinition, error) {
+ // Check if model is imported from external package using x-go-type
+ receiver := "m"
+ // models are resolved in the current package
+ modelPkg := opts.LanguageOpts.ManglePackageName(path.Base(filepath.ToSlash(pkg)), "definitions")
+ resolver := newTypeResolver("", "", specDoc).withDefinitionPackage(modelPkg)
+ resolver.ModelName = name
+ analyzed := analysis.New(specDoc.Spec())
+
+ di := discriminatorInfo(analyzed)
+
+ pg := schemaGenContext{
+ Path: "",
+ Name: name,
+ Receiver: receiver,
+ IndexVar: "i",
+ ValueExpr: receiver,
+ Schema: schema,
+ Required: false,
+ TypeResolver: resolver,
+ Named: true,
+ ExtraSchemas: make(map[string]GenSchema),
+ Discrimination: di,
+ Container: container,
+ IncludeValidator: opts.IncludeValidator,
+ IncludeModel: opts.IncludeModel,
+ StrictAdditionalProperties: opts.StrictAdditionalProperties,
+ WithXML: opts.WithXML,
+ StructTags: opts.StructTags,
+ }
+ if err := pg.makeGenSchema(); err != nil {
+ return nil, fmt.Errorf("could not generate schema for %s: %v", name, err)
+ }
+ dsi, ok := di.Discriminators["#/definitions/"+name]
+ if ok {
+ // when these 2 are true then the schema will render as an interface
+ pg.GenSchema.IsBaseType = true
+ pg.GenSchema.IsExported = true
+ pg.GenSchema.DiscriminatorField = dsi.FieldName
+
+ if pg.GenSchema.Discriminates == nil {
+ pg.GenSchema.Discriminates = make(map[string]string)
+ }
+ pg.GenSchema.Discriminates[name] = dsi.GoType
+ pg.GenSchema.DiscriminatorValue = name
+
+ for _, v := range dsi.Children {
+ pg.GenSchema.Discriminates[v.FieldValue] = v.GoType
+ }
+
+ for j := range pg.GenSchema.Properties {
+ if !strings.HasSuffix(pg.GenSchema.Properties[j].ValueExpression, asMethod) {
+ pg.GenSchema.Properties[j].ValueExpression += asMethod
+ }
+ }
+ }
+
+ dse, ok := di.Discriminated["#/definitions/"+name]
+ if ok {
+ pg.GenSchema.DiscriminatorField = dse.FieldName
+ pg.GenSchema.DiscriminatorValue = dse.FieldValue
+ pg.GenSchema.IsSubType = true
+ knownProperties := make(map[string]struct{})
+
+ // find the referenced definitions
+ // check if it has a discriminator defined
+ // when it has a discriminator get the schema and run makeGenSchema for it.
+ // replace the ref with this new genschema
+ swsp := specDoc.Spec()
+ for i, ss := range schema.AllOf {
+ if pg.GenSchema.AllOf == nil {
+ log.Printf("warning: resolved schema for subtype %q.AllOf[%d] is empty. skipped", name, i)
+ continue
+ }
+ ref := ss.Ref
+ for ref.String() != "" {
+ var rsch *spec.Schema
+ var err error
+ rsch, err = spec.ResolveRef(swsp, &ref)
+ if err != nil {
+ return nil, err
+ }
+ if rsch != nil && rsch.Ref.String() != "" {
+ ref = rsch.Ref
+ continue
+ }
+ ref = spec.Ref{}
+ if rsch != nil && rsch.Discriminator != "" {
+ gs, err := makeGenDefinitionHierarchy(strings.TrimPrefix(ss.Ref.String(), "#/definitions/"), pkg, pg.GenSchema.Name, *rsch, specDoc, opts)
+ if err != nil {
+ return nil, err
+ }
+ gs.GenSchema.IsBaseType = true
+ gs.GenSchema.IsExported = true
+ pg.GenSchema.AllOf[i] = gs.GenSchema
+ schPtr := &(pg.GenSchema.AllOf[i])
+ if schPtr.AdditionalItems != nil {
+ schPtr.AdditionalItems.IsBaseType = true
+ }
+ if schPtr.AdditionalProperties != nil {
+ schPtr.AdditionalProperties.IsBaseType = true
+ }
+ for j := range schPtr.Properties {
+ schPtr.Properties[j].IsBaseType = true
+ knownProperties[schPtr.Properties[j].Name] = struct{}{}
+ }
+ }
+ }
+ }
+
+ // dedupe the fields
+ alreadySeen := make(map[string]struct{})
+ for i, ss := range pg.GenSchema.AllOf {
+ var remainingProperties GenSchemaList
+ for _, p := range ss.Properties {
+ if _, ok := knownProperties[p.Name]; !ok || ss.IsBaseType {
+ if _, seen := alreadySeen[p.Name]; !seen {
+ remainingProperties = append(remainingProperties, p)
+ alreadySeen[p.Name] = struct{}{}
+ }
+ }
+ }
+ pg.GenSchema.AllOf[i].Properties = remainingProperties
+ }
+
+ }
+
+ defaultImports := map[string]string{
+ "errors": "github.com/go-openapi/errors",
+ "runtime": "github.com/go-openapi/runtime",
+ "swag": "github.com/go-openapi/swag",
+ "validate": "github.com/go-openapi/validate",
+ }
+
+ return &GenDefinition{
+ GenCommon: GenCommon{
+ Copyright: opts.Copyright,
+ TargetImportPath: opts.LanguageOpts.baseImport(opts.Target),
+ },
+ Package: modelPkg,
+ GenSchema: pg.GenSchema,
+ DependsOn: pg.Dependencies,
+ DefaultImports: defaultImports,
+ ExtraSchemas: gatherExtraSchemas(pg.ExtraSchemas),
+ Imports: findImports(&pg.GenSchema),
+ External: isExternal(schema),
+ }, nil
+}
+
+func findImports(sch *GenSchema) map[string]string {
+ imp := make(map[string]string, 20)
+ t := sch.resolvedType
+ if t.Pkg != "" && t.PkgAlias != "" {
+ imp[t.PkgAlias] = t.Pkg
+ }
+ if t.IsEmbedded && t.ElemType != nil {
+ if t.ElemType.Pkg != "" && t.ElemType.PkgAlias != "" {
+ imp[t.ElemType.PkgAlias] = t.ElemType.Pkg
+ }
+ }
+ if sch.Items != nil {
+ sub := findImports(sch.Items)
+ for k, v := range sub {
+ imp[k] = v
+ }
+ }
+ if sch.AdditionalItems != nil {
+ sub := findImports(sch.AdditionalItems)
+ for k, v := range sub {
+ imp[k] = v
+ }
+ }
+ if sch.Object != nil {
+ sub := findImports(sch.Object)
+ for k, v := range sub {
+ imp[k] = v
+ }
+ }
+ if sch.Properties != nil {
+ for _, props := range sch.Properties {
+ p := props
+ sub := findImports(&p)
+ for k, v := range sub {
+ imp[k] = v
+ }
+ }
+ }
+ if sch.AdditionalProperties != nil {
+ sub := findImports(sch.AdditionalProperties)
+ for k, v := range sub {
+ imp[k] = v
+ }
+ }
+ if sch.AllOf != nil {
+ for _, props := range sch.AllOf {
+ p := props
+ sub := findImports(&p)
+ for k, v := range sub {
+ imp[k] = v
+ }
+ }
+ }
+ for k, v := range sch.ExtraImports {
+ if k != "" && v != "" {
+ imp[k] = v
+ }
+ }
+
+ return imp
+}
+
+type schemaGenContext struct {
+ Required bool
+ AdditionalProperty bool
+ Untyped bool
+ Named bool
+ RefHandled bool
+ IsVirtual bool
+ IsTuple bool
+ IncludeValidator bool
+ IncludeModel bool
+ StrictAdditionalProperties bool
+ WithXML bool
+ Index int
+
+ Path string
+ Name string
+ ParamName string
+ Accessor string
+ Receiver string
+ IndexVar string
+ KeyVar string
+ ValueExpr string
+ Container string
+ Schema spec.Schema
+ TypeResolver *typeResolver
+ StructTags []string
+
+ GenSchema GenSchema
+ Dependencies []string // NOTE: Dependencies is actually set nowhere
+ ExtraSchemas map[string]GenSchema
+ Discriminator *discor
+ Discriminated *discee
+ Discrimination *discInfo
+
+ // force to use container in inlined definitions (for deconflicting)
+ UseContainerInName bool
+}
+
+func (sg *schemaGenContext) NewSliceBranch(schema *spec.Schema) *schemaGenContext {
+ debugLog("new slice branch %s (model: %s)", sg.Name, sg.TypeResolver.ModelName)
+ pg := sg.shallowClone()
+ indexVar := pg.IndexVar
+ if pg.Path == "" {
+ pg.Path = "strconv.Itoa(" + indexVar + ")"
+ } else {
+ pg.Path = pg.Path + "+ \".\" + strconv.Itoa(" + indexVar + ")"
+ }
+ // check who is parent, if it's a base type then rewrite the value expression
+ if sg.Discrimination != nil && sg.Discrimination.Discriminators != nil {
+ _, rewriteValueExpr := sg.Discrimination.Discriminators["#/definitions/"+sg.TypeResolver.ModelName]
+ if (pg.IndexVar == "i" && rewriteValueExpr) || sg.GenSchema.ElemType.IsBaseType {
+ if !sg.GenSchema.IsAliased {
+ pg.ValueExpr = sg.Receiver + "." + swag.ToJSONName(sg.GenSchema.Name) + "Field"
+ } else {
+ pg.ValueExpr = sg.Receiver
+ }
+ }
+ }
+ sg.GenSchema.IsBaseType = sg.GenSchema.ElemType.HasDiscriminator
+ pg.IndexVar = indexVar + "i"
+ pg.ValueExpr = pg.ValueExpr + "[" + indexVar + "]"
+ pg.Schema = *schema
+ pg.Required = false
+ if sg.IsVirtual {
+ pg.TypeResolver = sg.TypeResolver.NewWithModelName(sg.TypeResolver.ModelName)
+ }
+
+ // when this is an anonymous complex object, this needs to become a ref
+ return pg
+}
+
+func (sg *schemaGenContext) NewAdditionalItems(schema *spec.Schema) *schemaGenContext {
+ debugLog("new additional items\n")
+
+ pg := sg.shallowClone()
+ indexVar := pg.IndexVar
+ pg.Name = sg.Name + " items"
+ itemsLen := 0
+ if sg.Schema.Items != nil {
+ itemsLen = sg.Schema.Items.Len()
+ }
+ var mod string
+ if itemsLen > 0 {
+ mod = "+" + strconv.Itoa(itemsLen)
+ }
+ if pg.Path == "" {
+ pg.Path = "strconv.Itoa(" + indexVar + mod + ")"
+ } else {
+ pg.Path = pg.Path + "+ \".\" + strconv.Itoa(" + indexVar + mod + ")"
+ }
+ pg.IndexVar = indexVar
+ pg.ValueExpr = sg.ValueExpr + "." + pascalize(sg.GoName()) + "Items[" + indexVar + "]"
+ pg.Schema = spec.Schema{}
+ if schema != nil {
+ pg.Schema = *schema
+ }
+ pg.Required = false
+ return pg
+}
+
+func (sg *schemaGenContext) NewTupleElement(schema *spec.Schema, index int) *schemaGenContext {
+ debugLog("New tuple element\n")
+
+ pg := sg.shallowClone()
+ if pg.Path == "" {
+ pg.Path = "\"" + strconv.Itoa(index) + "\""
+ } else {
+ pg.Path = pg.Path + "+ \".\"+\"" + strconv.Itoa(index) + "\""
+ }
+ pg.ValueExpr = pg.ValueExpr + ".P" + strconv.Itoa(index)
+
+ pg.Required = true
+ pg.IsTuple = true
+ pg.Schema = *schema
+
+ return pg
+}
+
+func (sg *schemaGenContext) NewStructBranch(name string, schema spec.Schema) *schemaGenContext {
+ debugLog("new struct branch %s (parent %s)", sg.Name, sg.Container)
+ pg := sg.shallowClone()
+ if sg.Path == "" {
+ pg.Path = fmt.Sprintf("%q", name)
+ } else {
+ pg.Path = pg.Path + "+\".\"+" + fmt.Sprintf("%q", name)
+ }
+ pg.Name = name
+ pg.ValueExpr = pg.ValueExpr + "." + pascalize(goName(&schema, name))
+ pg.Schema = schema
+ for _, fn := range sg.Schema.Required {
+ if name == fn {
+ pg.Required = true
+ break
+ }
+ }
+ debugLog("made new struct branch %s (parent %s)", pg.Name, pg.Container)
+ return pg
+}
+
+func (sg *schemaGenContext) shallowClone() *schemaGenContext {
+ debugLog("cloning context %s\n", sg.Name)
+ pg := new(schemaGenContext)
+ *pg = *sg
+ if pg.Container == "" {
+ pg.Container = sg.Name
+ }
+ pg.GenSchema = GenSchema{StructTags: sg.StructTags}
+ pg.Dependencies = nil
+ pg.Named = false
+ pg.Index = 0
+ pg.IsTuple = false
+ pg.IncludeValidator = sg.IncludeValidator
+ pg.IncludeModel = sg.IncludeModel
+ pg.StrictAdditionalProperties = sg.StrictAdditionalProperties
+ return pg
+}
+
+func (sg *schemaGenContext) NewCompositionBranch(schema spec.Schema, index int) *schemaGenContext {
+ debugLog("new composition branch %s (parent: %s, index: %d)", sg.Name, sg.Container, index)
+ pg := sg.shallowClone()
+ pg.Schema = schema
+ pg.Name = "AO" + strconv.Itoa(index)
+ if sg.Name != sg.TypeResolver.ModelName {
+ pg.Name = sg.Name + pg.Name
+ }
+ pg.Index = index
+ debugLog("made new composition branch %s (parent: %s)", pg.Name, pg.Container)
+ return pg
+}
+
+func (sg *schemaGenContext) NewAdditionalProperty(schema spec.Schema) *schemaGenContext {
+ debugLog("new additional property %s (expr: %s)", sg.Name, sg.ValueExpr)
+ pg := sg.shallowClone()
+ pg.Schema = schema
+ if pg.KeyVar == "" {
+ pg.ValueExpr = sg.ValueExpr
+ }
+ pg.KeyVar += "k"
+ pg.ValueExpr += "[" + pg.KeyVar + "]"
+ pg.Path = pg.KeyVar
+ pg.GenSchema.Suffix = "Value"
+ if sg.Path != "" {
+ pg.Path = sg.Path + "+\".\"+" + pg.KeyVar
+ }
+ // propagates the special IsNullable override for maps of slices and
+ // maps of aliased types.
+ pg.GenSchema.IsMapNullOverride = sg.GenSchema.IsMapNullOverride
+ return pg
+}
+
+func hasContextValidations(model *spec.Schema) bool {
+ // always assume ref needs context validate
+	// TODO: find a way to determine whether the ref needs context validate or not
+ if model.ReadOnly || model.Ref.String() != "" {
+ return true
+ }
+ return false
+}
+
+func hasValidations(model *spec.Schema, isRequired bool) bool {
+ if isRequired {
+ return true
+ }
+
+ v := model.Validations()
+ if v.HasNumberValidations() || v.HasStringValidations() || v.HasArrayValidations() || v.HasEnum() || v.HasObjectValidations() {
+ return true
+ }
+
+ // since this was added to deal with discriminator, we'll fix this when testing discriminated types
+ if len(model.Properties) > 0 && model.Discriminator == "" {
+ return true
+ }
+
+ // lift validations from allOf branches
+ for _, s := range model.AllOf {
+ schema := s
+ if s.Ref.String() != "" || hasValidations(&schema, false) {
+ return true
+ }
+ }
+
+ return false
+}
+
+func hasFormatValidation(tpe resolvedType) bool {
+ if tpe.IsCustomFormatter && !tpe.IsStream && !tpe.IsBase64 {
+ return true
+ }
+ if tpe.IsArray && tpe.ElemType != nil {
+ return hasFormatValidation(*tpe.ElemType)
+ }
+ return false
+}
+
+func (sg *schemaGenContext) schemaValidations() sharedValidations {
+ model := sg.Schema
+
+ isRequired := sg.Required
+ if model.Default != nil || model.ReadOnly {
+ // when readOnly or default is specified, this disables Required validation (Swagger-specific)
+ isRequired = false
+ if sg.Required {
+ log.Printf("warn: properties with a default value or readOnly should not be required [%s]", sg.Name)
+ }
+ }
+
+ v := model.Validations()
+ return sharedValidations{
+ Required: sg.Required, /* TODO(fred): guard for cases with discriminator field, default and readOnly*/
+ SchemaValidations: v,
+ HasSliceValidations: v.HasArrayValidations() || v.HasEnum(),
+ HasValidations: hasValidations(&model, isRequired),
+ }
+}
+
+func mergeValidation(other *schemaGenContext) bool {
+	// NOTE: NeedsRequired and NeedsValidation are deprecated
+ if other.GenSchema.AdditionalProperties != nil && other.GenSchema.AdditionalProperties.HasValidations {
+ return true
+ }
+ if other.GenSchema.AdditionalItems != nil && other.GenSchema.AdditionalItems.HasValidations {
+ return true
+ }
+ for _, sch := range other.GenSchema.AllOf {
+ if sch.HasValidations {
+ return true
+ }
+ }
+ return other.GenSchema.HasValidations
+}
+
+func (sg *schemaGenContext) MergeResult(other *schemaGenContext, liftsRequired bool) {
+ sg.GenSchema.HasValidations = sg.GenSchema.HasValidations || mergeValidation(other)
+ sg.GenSchema.HasContextValidations = sg.GenSchema.HasContextValidations || other.GenSchema.HasContextValidations
+
+ if liftsRequired && other.GenSchema.AdditionalProperties != nil && other.GenSchema.AdditionalProperties.Required {
+ sg.GenSchema.Required = true
+ }
+ if liftsRequired && other.GenSchema.Required {
+ sg.GenSchema.Required = other.GenSchema.Required
+ }
+
+ if other.GenSchema.HasBaseType {
+ sg.GenSchema.HasBaseType = other.GenSchema.HasBaseType
+ }
+
+ sg.Dependencies = append(sg.Dependencies, other.Dependencies...)
+
+ // lift extra schemas
+ for k, v := range other.ExtraSchemas {
+ sg.ExtraSchemas[k] = v
+ }
+ if other.GenSchema.IsMapNullOverride {
+ sg.GenSchema.IsMapNullOverride = true
+ }
+
+ // lift extra imports
+ if other.GenSchema.Pkg != "" && other.GenSchema.PkgAlias != "" {
+ sg.GenSchema.ExtraImports[other.GenSchema.PkgAlias] = other.GenSchema.Pkg
+ }
+ for k, v := range other.GenSchema.ExtraImports {
+ sg.GenSchema.ExtraImports[k] = v
+ }
+}
+
+func (sg *schemaGenContext) buildProperties() error {
+ debugLog("building properties %s (parent: %s)", sg.Name, sg.Container)
+
+ for k, v := range sg.Schema.Properties {
+ debugLogAsJSON("building property %s[%q] (IsTuple: %t) (IsBaseType: %t) (HasValidations: %t)",
+ sg.Name, k, sg.IsTuple, sg.GenSchema.IsBaseType, sg.GenSchema.HasValidations, v)
+
+ vv := v
+
+ // check if this requires de-anonymizing, if so lift this as a new struct and extra schema
+ tpe, err := sg.TypeResolver.ResolveSchema(&vv, true, sg.IsTuple || swag.ContainsStrings(sg.Schema.Required, k))
+ if err != nil {
+ return err
+ }
+ if sg.Schema.Discriminator == k {
+ tpe.IsNullable = false
+ }
+
+ var hasValidation bool
+ if tpe.IsComplexObject && tpe.IsAnonymous && len(v.Properties) > 0 {
+		// this is an anonymous complex construct: build a new type for it
+ pg := sg.makeNewStruct(sg.makeRefName()+swag.ToGoName(k), v)
+ pg.IsTuple = sg.IsTuple
+ if sg.Path != "" {
+ pg.Path = sg.Path + "+ \".\"+" + fmt.Sprintf("%q", k)
+ } else {
+ pg.Path = fmt.Sprintf("%q", k)
+ }
+ if err := pg.makeGenSchema(); err != nil {
+ return err
+ }
+ if v.Discriminator != "" {
+ pg.GenSchema.IsBaseType = true
+ pg.GenSchema.IsExported = true
+ pg.GenSchema.HasBaseType = true
+ }
+
+ vv = *spec.RefProperty("#/definitions/" + pg.Name)
+ hasValidation = pg.GenSchema.HasValidations
+ sg.ExtraSchemas[pg.Name] = pg.GenSchema
+ // NOTE: MergeResult lifts validation status and extra schemas
+ sg.MergeResult(pg, false)
+ }
+
+ emprop := sg.NewStructBranch(k, vv)
+ emprop.IsTuple = sg.IsTuple
+
+ if err := emprop.makeGenSchema(); err != nil {
+ return err
+ }
+
+ // whatever the validations says, if we have an interface{}, do not validate
+ // NOTE: this may be the case when the type is left empty and we get a Enum validation.
+ if emprop.GenSchema.IsInterface || emprop.GenSchema.IsStream {
+ emprop.GenSchema.HasValidations = false
+ } else if hasValidation || emprop.GenSchema.HasValidations || emprop.GenSchema.Required || emprop.GenSchema.IsAliased || len(emprop.GenSchema.AllOf) > 0 {
+ emprop.GenSchema.HasValidations = true
+ sg.GenSchema.HasValidations = true
+ }
+
+ // generates format validation on property
+ emprop.GenSchema.HasValidations = emprop.GenSchema.HasValidations || hasFormatValidation(tpe)
+
+ if emprop.Schema.Ref.String() != "" {
+ // expand the schema of this property, so we take informed decisions about its type
+ ref := emprop.Schema.Ref
+ var sch *spec.Schema
+ for ref.String() != "" {
+ var rsch *spec.Schema
+ var err error
+ specDoc := sg.TypeResolver.Doc
+ rsch, err = spec.ResolveRef(specDoc.Spec(), &ref)
+ if err != nil {
+ return err
+ }
+ if rsch == nil {
+ return errors.New("spec.ResolveRef returned nil schema")
+ }
+ if rsch != nil && rsch.Ref.String() != "" {
+ ref = rsch.Ref
+ continue
+ }
+ ref = spec.Ref{}
+ sch = rsch
+ }
+
+ if emprop.Discrimination != nil {
+ if _, ok := emprop.Discrimination.Discriminators[emprop.Schema.Ref.String()]; ok {
+ emprop.GenSchema.IsBaseType = true
+ emprop.GenSchema.IsNullable = false
+ emprop.GenSchema.HasBaseType = true
+ }
+ if _, ok := emprop.Discrimination.Discriminated[emprop.Schema.Ref.String()]; ok {
+ emprop.GenSchema.IsSubType = true
+ }
+ }
+
+ // set property name
+ var nm = filepath.Base(emprop.Schema.Ref.GetURL().Fragment)
+
+ tr := sg.TypeResolver.NewWithModelName(goName(&emprop.Schema, swag.ToGoName(nm)))
+ ttpe, err := tr.ResolveSchema(sch, false, true)
+ if err != nil {
+ return err
+ }
+ if ttpe.IsAliased {
+ emprop.GenSchema.IsAliased = true
+ }
+
+ // lift validations
+ hv := hasValidations(sch, false)
+
+ // include format validation, excluding binary
+ hv = hv || hasFormatValidation(ttpe)
+
+ // a base type property is always validated against the base type
+ // exception: for the base type definition itself (see shallowValidationLookup())
+ if (hv || emprop.GenSchema.IsBaseType) && !(emprop.GenSchema.IsInterface || emprop.GenSchema.IsStream) {
+ emprop.GenSchema.HasValidations = true
+ }
+ if ttpe.HasAdditionalItems && sch.AdditionalItems.Schema != nil {
+ // when AdditionalItems specifies a Schema, there is a validation
+ // check if we stepped upon an exception
+ child, err := tr.ResolveSchema(sch.AdditionalItems.Schema, false, true)
+ if err != nil {
+ return err
+ }
+ if !child.IsInterface && !child.IsStream {
+ emprop.GenSchema.HasValidations = true
+ }
+ }
+ if ttpe.IsMap && sch.AdditionalProperties != nil && sch.AdditionalProperties.Schema != nil {
+ // when AdditionalProperties specifies a Schema, there is a validation
+ // check if we stepped upon an exception
+ child, err := tr.ResolveSchema(sch.AdditionalProperties.Schema, false, true)
+ if err != nil {
+ return err
+ }
+ if !child.IsInterface && !child.IsStream {
+ emprop.GenSchema.HasValidations = true
+ }
+ }
+ }
+
+ if sg.Schema.Discriminator == k {
+ // this is the discriminator property:
+ // it is required, but forced as non-nullable,
+ // since we never fill it with a zero-value
+ // TODO: when no other property than discriminator, there is no validation
+ emprop.GenSchema.IsNullable = false
+ }
+ if emprop.GenSchema.IsBaseType {
+ sg.GenSchema.HasBaseType = true
+ }
+ sg.MergeResult(emprop, false)
+
+ // when discriminated, data is accessed via a getter func
+ if emprop.GenSchema.HasDiscriminator {
+ emprop.GenSchema.ValueExpression += asMethod
+ }
+
+ emprop.GenSchema.Extensions = emprop.Schema.Extensions
+
+ // set custom serializer tag
+ if customTag, found := tpe.Extensions[xGoCustomTag]; found {
+ tagAsStr, ok := customTag.(string)
+ if ok {
+ emprop.GenSchema.CustomTag = tagAsStr
+ } else {
+ log.Printf("warning: expect %s extension to be a string, got: %v. Skipped", xGoCustomTag, customTag)
+ }
+ }
+ sg.GenSchema.Properties = append(sg.GenSchema.Properties, emprop.GenSchema)
+ }
+ sort.Sort(sg.GenSchema.Properties)
+
+ return nil
+}
+
+func (sg *schemaGenContext) buildAllOf() error {
+ if len(sg.Schema.AllOf) == 0 {
+ return nil
+ }
+
+ var hasArray, hasNonArray int
+
+ sort.Sort(sg.GenSchema.AllOf)
+ if sg.Container == "" {
+ sg.Container = sg.Name
+ }
+ debugLogAsJSON("building all of for %d entries", len(sg.Schema.AllOf), sg.Schema)
+ for i, schema := range sg.Schema.AllOf {
+ sch := schema
+ tpe, ert := sg.TypeResolver.ResolveSchema(&sch, sch.Ref.String() == "", false)
+ if ert != nil {
+ return ert
+ }
+
+ // check for multiple arrays in allOf branches.
+ // Although a valid JSON-Schema construct, it is not suited for serialization.
+ // This is the same if we attempt to serialize an array with another object.
+ // We issue a generation warning on this.
+ if tpe.IsArray {
+ hasArray++
+ } else {
+ hasNonArray++
+ }
+ debugLogAsJSON("trying", sch)
+ if (tpe.IsAnonymous && len(sch.AllOf) > 0) || (sch.Ref.String() == "" && !tpe.IsComplexObject && (tpe.IsArray || tpe.IsInterface || tpe.IsPrimitive)) {
+ // cases where anonymous structures cause the creation of a new type:
+ // - nested allOf: this one is itself a AllOf: build a new type for it
+ // - anonymous simple types for edge cases: array, primitive, interface{}
+ // NOTE: when branches are aliased or anonymous, the nullable property in the branch type is lost.
+ name := swag.ToVarName(goName(&sch, sg.makeRefName()+"AllOf"+strconv.Itoa(i)))
+ debugLog("building anonymous nested allOf in %s: %s", sg.Name, name)
+ ng := sg.makeNewStruct(name, sch)
+ if err := ng.makeGenSchema(); err != nil {
+ return err
+ }
+
+ newsch := spec.RefProperty("#/definitions/" + ng.Name)
+ sg.Schema.AllOf[i] = *newsch
+
+ pg := sg.NewCompositionBranch(*newsch, i)
+ if err := pg.makeGenSchema(); err != nil {
+ return err
+ }
+
+ // lift extra schemas & validations from new type
+ pg.MergeResult(ng, true)
+
+ // lift validations when complex or ref'ed:
+ // - parent always calls its Validatable child
+ // - child may or may not have validations
+ //
+ // Exception: child is not Validatable when interface or stream
+ if !pg.GenSchema.IsInterface && !pg.GenSchema.IsStream {
+ sg.GenSchema.HasValidations = true
+ }
+
+ // add the newly created type to the list of schemas to be rendered inline
+ pg.ExtraSchemas[ng.Name] = ng.GenSchema
+
+ sg.MergeResult(pg, true)
+
+ sg.GenSchema.AllOf = append(sg.GenSchema.AllOf, pg.GenSchema)
+
+ continue
+ }
+
+ comprop := sg.NewCompositionBranch(sch, i)
+ if err := comprop.makeGenSchema(); err != nil {
+ return err
+ }
+ if comprop.GenSchema.IsMap && comprop.GenSchema.HasAdditionalProperties && comprop.GenSchema.AdditionalProperties != nil && !comprop.GenSchema.IsInterface {
+ // the anonymous branch is a map for AdditionalProperties: rewrite value expression
+ comprop.GenSchema.ValueExpression = comprop.GenSchema.ValueExpression + "." + comprop.Name
+ comprop.GenSchema.AdditionalProperties.ValueExpression = comprop.GenSchema.ValueExpression + "[" + comprop.GenSchema.AdditionalProperties.KeyVar + "]"
+ }
+
+ // lift validations when complex or ref'ed
+ if (comprop.GenSchema.IsComplexObject || comprop.Schema.Ref.String() != "") && !(comprop.GenSchema.IsInterface || comprop.GenSchema.IsStream) {
+ comprop.GenSchema.HasValidations = true
+ }
+ sg.MergeResult(comprop, true)
+ sg.GenSchema.AllOf = append(sg.GenSchema.AllOf, comprop.GenSchema)
+ }
+
+ if hasArray > 1 || (hasArray > 0 && hasNonArray > 0) {
+ log.Printf("warning: cannot generate serializable allOf with conflicting array definitions in %s", sg.Container)
+ }
+
+ // AllOf types are always considered nullable, except when an extension says otherwise
+ if override, ok := sg.TypeResolver.isNullableOverride(&sg.Schema); ok {
+ sg.GenSchema.IsNullable = override
+ } else {
+ sg.GenSchema.IsNullable = true
+ }
+
+ // prevent IsAliased to bubble up (e.g. when a single branch is itself aliased)
+ sg.GenSchema.IsAliased = sg.GenSchema.IsAliased && len(sg.GenSchema.AllOf) < 2
+
+ return nil
+}
+
// mapStack is a doubly-linked list of nested AdditionalProperties schemas,
// used to descend into a chain of maps (map[string]map[string]...) and then
// rebuild generation schemas depth first, from bottom to top.
type mapStack struct {
	Type     *spec.Schema      // the schema at this nesting level
	Next     *mapStack         // deeper level (the value schema of this map); nil at the bottom
	Previous *mapStack         // shallower level; nil at the top
	ValueRef *schemaGenContext // gen context for the $ref created when the value is an anonymous object
	Context  *schemaGenContext // gen context for this level
	NewObj   *schemaGenContext // newly created named struct when the value is an anonymous object
}
+
// newMapStack builds a stack of mapStack entries by descending into nested
// AdditionalProperties schemas, starting from context's schema.
//
// When the bottom of the chain is an anonymous complex object, it is promoted
// to a named definition and replaced in place by a $ref (recorded in NewObj /
// ValueRef). It returns the top (first) and bottom (last) entries of the stack.
func newMapStack(context *schemaGenContext) (first, last *mapStack, err error) {
	ms := &mapStack{
		Type:    &context.Schema,
		Context: context,
	}

	l := ms
	for l.HasMore() {
		tpe, err := l.Context.TypeResolver.ResolveSchema(l.Type.AdditionalProperties.Schema, true, true)
		if err != nil {
			return nil, nil, err
		}

		if !tpe.IsMap {
			// reached the end of the rabbit hole
			if tpe.IsComplexObject && tpe.IsAnonymous {
				// found an anonymous object: create the struct from a newly created definition
				nw := l.Context.makeNewStruct(l.Context.makeRefName()+" Anon", *l.Type.AdditionalProperties.Schema)
				sch := spec.RefProperty("#/definitions/" + nw.Name)
				l.NewObj = nw

				// rewrite the spec in place: the anonymous object is now a $ref
				l.Type.AdditionalProperties.Schema = sch
				l.ValueRef = l.Context.NewAdditionalProperty(*sch)
			}

			// other cases where to stop are: a $ref or a simple object
			break
		}

		// continue digging for maps
		l.Next = &mapStack{
			Previous: l,
			Type:     l.Type.AdditionalProperties.Schema,
			Context:  l.Context.NewAdditionalProperty(*l.Type.AdditionalProperties.Schema),
		}
		l = l.Next
	}

	// return top and bottom entries of this stack of AdditionalProperties
	return ms, l, nil
}
+
+// Build rewinds the stack of additional properties, building schemas from bottom to top
+func (mt *mapStack) Build() error {
+ if mt.NewObj == nil && mt.ValueRef == nil && mt.Next == nil && mt.Previous == nil {
+ csch := mt.Type.AdditionalProperties.Schema
+ cp := mt.Context.NewAdditionalProperty(*csch)
+ d := mt.Context.TypeResolver.Doc
+
+ asch, err := analysis.Schema(analysis.SchemaOpts{
+ Root: d.Spec(),
+ BasePath: d.SpecFilePath(),
+ Schema: csch,
+ })
+ if err != nil {
+ return err
+ }
+ cp.Required = !asch.IsSimpleSchema && !asch.IsMap
+
+ // when the schema is an array or an alias, this may result in inconsistent
+ // nullable status between the map element and the array element (resp. the aliased type).
+ //
+ // Example: when an object has no property and only additionalProperties,
+ // which turn out to be arrays of some other object.
+
+ // save the initial override
+ hadOverride := cp.GenSchema.IsMapNullOverride
+ if err := cp.makeGenSchema(); err != nil {
+ return err
+ }
+
+ // if we have an override at the top of stack, propagates it down nested arrays
+ if hadOverride && cp.GenSchema.IsArray {
+ // do it for nested arrays: override is also about map[string][][]... constructs
+ it := &cp.GenSchema
+ for it.Items != nil && it.IsArray {
+ it.Items.IsMapNullOverride = hadOverride
+ it = it.Items
+ }
+ }
+ // cover other cases than arrays (aliased types)
+ cp.GenSchema.IsMapNullOverride = hadOverride
+
+ mt.Context.MergeResult(cp, false)
+ mt.Context.GenSchema.AdditionalProperties = &cp.GenSchema
+
+ // lift validations
+ if (csch.Ref.String() != "" || cp.GenSchema.IsAliased) && !(cp.GenSchema.IsInterface || cp.GenSchema.IsStream) {
+ // - we stopped on a ref, or anything else that require we call its Validate() method
+ // - if the alias / ref is on an interface (or stream) type: no validation
+ mt.Context.GenSchema.HasValidations = true
+ mt.Context.GenSchema.AdditionalProperties.HasValidations = true
+ }
+
+ debugLog("early mapstack exit, nullable: %t for %s", cp.GenSchema.IsNullable, cp.GenSchema.Name)
+ return nil
+ }
+ cur := mt
+ for cur != nil {
+ if cur.NewObj != nil {
+ // a new model has been created during the stack construction (new ref on anonymous object)
+ if err := cur.NewObj.makeGenSchema(); err != nil {
+ return err
+ }
+ }
+
+ if cur.ValueRef != nil {
+ if err := cur.ValueRef.makeGenSchema(); err != nil {
+ return nil
+ }
+ }
+
+ if cur.NewObj != nil {
+ // newly created model from anonymous object is declared as extra schema
+ cur.Context.MergeResult(cur.NewObj, false)
+
+ // propagates extra schemas
+ cur.Context.ExtraSchemas[cur.NewObj.Name] = cur.NewObj.GenSchema
+ }
+
+ if cur.ValueRef != nil {
+ // this is the genSchema for this new anonymous AdditionalProperty
+ if err := cur.Context.makeGenSchema(); err != nil {
+ return err
+ }
+
+ // if there is a ValueRef, we must have a NewObj (from newMapStack() construction)
+ cur.ValueRef.GenSchema.HasValidations = cur.NewObj.GenSchema.HasValidations
+ cur.Context.MergeResult(cur.ValueRef, false)
+ cur.Context.GenSchema.AdditionalProperties = &cur.ValueRef.GenSchema
+ }
+
+ if cur.Previous != nil {
+ // we have a parent schema: build a schema for current AdditionalProperties
+ if err := cur.Context.makeGenSchema(); err != nil {
+ return err
+ }
+ }
+ if cur.Next != nil {
+ // we previously made a child schema: lifts things from that one
+ // - Required is not lifted (in a cascade of maps, only the last element is actually checked for Required)
+ cur.Context.MergeResult(cur.Next.Context, false)
+ cur.Context.GenSchema.AdditionalProperties = &cur.Next.Context.GenSchema
+
+ // lift validations
+ c := &cur.Next.Context.GenSchema
+ if (cur.Next.Context.Schema.Ref.String() != "" || c.IsAliased) && !(c.IsInterface || c.IsStream) {
+ // - we stopped on a ref, or anything else that require we call its Validate()
+ // - if the alias / ref is on an interface (or stream) type: no validation
+ cur.Context.GenSchema.HasValidations = true
+ cur.Context.GenSchema.AdditionalProperties.HasValidations = true
+ }
+ }
+ if cur.ValueRef != nil {
+ cur.Context.MergeResult(cur.ValueRef, false)
+ cur.Context.GenSchema.AdditionalProperties = &cur.ValueRef.GenSchema
+ }
+
+ if cur.Context.GenSchema.AdditionalProperties != nil {
+ // propagate overrides up the resolved schemas, but leaves any ExtraSchema untouched
+ cur.Context.GenSchema.AdditionalProperties.IsMapNullOverride = cur.Context.GenSchema.IsMapNullOverride
+ }
+ cur = cur.Previous
+ }
+
+ return nil
+}
+
+func (mt *mapStack) HasMore() bool {
+ return mt.Type.AdditionalProperties != nil && (mt.Type.AdditionalProperties.Schema != nil || mt.Type.AdditionalProperties.Allows)
+}
+
+/* currently unused:
+func (mt *mapStack) Dict() map[string]interface{} {
+ res := make(map[string]interface{})
+ res["context"] = mt.Context.Schema
+ if mt.Next != nil {
+ res["next"] = mt.Next.Dict()
+ }
+ if mt.NewObj != nil {
+ res["obj"] = mt.NewObj.Schema
+ }
+ if mt.ValueRef != nil {
+ res["value"] = mt.ValueRef.Schema
+ }
+ return res
+}
+*/
+
// buildAdditionalProperties produces the generation schema for map-like
// constructs (AdditionalProperties). It covers, in order:
//   - additionalProperties: true|false (rendered as map[string]interface{})
//   - a named complex object carrying an additionalProperties schema
//   - nested chains of maps (delegated to newMapStack / mapStack.Build)
//   - an anonymous object with additionalProperties, promoted to a named
//     definition referred to by $ref.
func (sg *schemaGenContext) buildAdditionalProperties() error {
	if sg.Schema.AdditionalProperties == nil {
		return nil
	}
	addp := *sg.Schema.AdditionalProperties

	wantsAdditional := addp.Schema != nil || addp.Allows
	sg.GenSchema.HasAdditionalProperties = wantsAdditional
	if !wantsAdditional {
		return nil
	}

	// flag swap: a complex object carrying additionalProperties is rendered
	// as an "additional properties" construct, not as a plain object or map
	if sg.GenSchema.IsComplexObject {
		sg.GenSchema.IsAdditionalProperties = true
		sg.GenSchema.IsComplexObject = false
		sg.GenSchema.IsMap = false
	}

	if addp.Schema == nil {
		// this is for AdditionalProperties:true|false
		if addp.Allows {
			// additionalProperties: true is rendered as: map[string]interface{}
			addp.Schema = &spec.Schema{}

			addp.Schema.Typed("object", "")
			sg.GenSchema.HasAdditionalProperties = true
			sg.GenSchema.IsComplexObject = false
			sg.GenSchema.IsMap = true

			sg.GenSchema.ValueExpression += "." + swag.ToGoName(sg.Name+" additionalProperties")
			cp := sg.NewAdditionalProperty(*addp.Schema)
			cp.Name += "AdditionalProperties"
			cp.Required = false
			if err := cp.makeGenSchema(); err != nil {
				return err
			}
			sg.MergeResult(cp, false)
			sg.GenSchema.AdditionalProperties = &cp.GenSchema
			debugLog("added interface{} schema for additionalProperties[allows == true], IsInterface=%t", cp.GenSchema.IsInterface)
		}
		return nil
	}

	if !sg.GenSchema.IsMap && (sg.GenSchema.IsAdditionalProperties && sg.Named) {
		// we have a complex object with an AdditionalProperties schema

		tpe, ert := sg.TypeResolver.ResolveSchema(addp.Schema, addp.Schema.Ref.String() == "", false)
		if ert != nil {
			return ert
		}

		if tpe.IsComplexObject && tpe.IsAnonymous {
			// if the AdditionalProperties is an anonymous complex object, generate a new type for it
			pg := sg.makeNewStruct(sg.makeRefName()+" Anon", *addp.Schema)
			if err := pg.makeGenSchema(); err != nil {
				return err
			}
			sg.MergeResult(pg, false)
			sg.ExtraSchemas[pg.Name] = pg.GenSchema

			// rewrite the spec: the anonymous object is now a $ref to the new definition
			sg.Schema.AdditionalProperties.Schema = spec.RefProperty("#/definitions/" + pg.Name)
			sg.IsVirtual = true

			comprop := sg.NewAdditionalProperty(*sg.Schema.AdditionalProperties.Schema)
			if err := comprop.makeGenSchema(); err != nil {
				return err
			}

			comprop.GenSchema.Required = true
			comprop.GenSchema.HasValidations = true

			comprop.GenSchema.ValueExpression = sg.GenSchema.ValueExpression + "." + swag.ToGoName(sg.GenSchema.Name) + "[" + comprop.KeyVar + "]"

			sg.GenSchema.AdditionalProperties = &comprop.GenSchema
			sg.GenSchema.HasAdditionalProperties = true
			sg.GenSchema.ValueExpression += "." + swag.ToGoName(sg.GenSchema.Name)

			sg.MergeResult(comprop, false)

			return nil
		}

		// this is a regular named schema for AdditionalProperties
		sg.GenSchema.ValueExpression += "." + swag.ToGoName(sg.GenSchema.Name)
		comprop := sg.NewAdditionalProperty(*addp.Schema)
		d := sg.TypeResolver.Doc
		asch, err := analysis.Schema(analysis.SchemaOpts{
			Root:     d.Spec(),
			BasePath: d.SpecFilePath(),
			Schema:   addp.Schema,
		})
		if err != nil {
			return err
		}
		// map values of simple or map type are not required
		comprop.Required = !asch.IsSimpleSchema && !asch.IsMap
		if err := comprop.makeGenSchema(); err != nil {
			return err
		}

		sg.MergeResult(comprop, false)
		sg.GenSchema.AdditionalProperties = &comprop.GenSchema
		sg.GenSchema.AdditionalProperties.ValueExpression = sg.GenSchema.ValueExpression + "[" + comprop.KeyVar + "]"

		// rewrite value expression for arrays and arrays of arrays in maps (rendered as map[string][][]...)
		if sg.GenSchema.AdditionalProperties.IsArray {
			// maps of slices are where an override may take effect
			sg.GenSchema.AdditionalProperties.Items.IsMapNullOverride = sg.GenSchema.AdditionalProperties.IsMapNullOverride
			sg.GenSchema.AdditionalProperties.Items.ValueExpression = sg.GenSchema.ValueExpression + "[" + comprop.KeyVar + "]" + "[" + sg.GenSchema.AdditionalProperties.IndexVar + "]"
			ap := sg.GenSchema.AdditionalProperties.Items
			for ap != nil && ap.IsArray {
				ap.Items.IsMapNullOverride = ap.IsMapNullOverride
				ap.Items.ValueExpression = ap.ValueExpression + "[" + ap.IndexVar + "]"
				ap = ap.Items
			}
		}

		// lift validation
		if (sg.GenSchema.AdditionalProperties.IsComplexObject || sg.GenSchema.AdditionalProperties.IsAliased || sg.GenSchema.AdditionalProperties.Required) && !(sg.GenSchema.AdditionalProperties.IsInterface || sg.GenSchema.IsStream) {
			sg.GenSchema.HasValidations = true
		}
		return nil
	}

	if sg.GenSchema.IsMap && wantsAdditional {
		// this is itself an AdditionalProperties schema with some AdditionalProperties.
		// this also runs for aliased map types (with zero properties save additionalProperties)
		//
		// find out how deep this rabbit hole goes
		// descend, unwind and rewrite
		// This needs to be depth first, so it first goes as deep as it can and then
		// builds the result in reverse order.
		_, ls, err := newMapStack(sg)
		if err != nil {
			return err
		}
		return ls.Build()
	}

	if sg.GenSchema.IsAdditionalProperties && !sg.Named {
		// for an anonymous object, first build the new object
		// and then replace the current one with a $ref to the
		// new object
		newObj := sg.makeNewStruct(sg.GenSchema.Name+" P"+strconv.Itoa(sg.Index), sg.Schema)
		if err := newObj.makeGenSchema(); err != nil {
			return err
		}

		// the current GenSchema is reset and rebuilt from the $ref,
		// preserving only the struct tags and the map-null override
		hasMapNullOverride := sg.GenSchema.IsMapNullOverride
		sg.GenSchema = GenSchema{StructTags: sg.StructTags}
		sg.Schema = *spec.RefProperty("#/definitions/" + newObj.Name)
		if err := sg.makeGenSchema(); err != nil {
			return err
		}
		sg.MergeResult(newObj, false)

		sg.GenSchema.IsMapNullOverride = hasMapNullOverride
		if sg.GenSchema.IsArray {
			sg.GenSchema.Items.IsMapNullOverride = hasMapNullOverride
		}

		sg.GenSchema.HasValidations = newObj.GenSchema.HasValidations
		sg.ExtraSchemas[newObj.Name] = newObj.GenSchema
		return nil
	}
	return nil
}
+
// makeNewStruct creates a named generation context for schema, registering it
// in the spec's Definitions so that later $ref resolution finds it.
//
// NOTE: this mutates the spec document (adds a definition) as a side effect.
// The returned context is flagged IsVirtual since the definition did not exist
// in the original spec.
func (sg *schemaGenContext) makeNewStruct(name string, schema spec.Schema) *schemaGenContext {
	debugLog("making new struct: name: %s, container: %s", name, sg.Container)
	sp := sg.TypeResolver.Doc.Spec()
	name = swag.ToGoName(name)
	if sg.TypeResolver.ModelName != sg.Name {
		// deconflict with the enclosing model name
		name = swag.ToGoName(sg.TypeResolver.ModelName + " " + name)
	}
	if sp.Definitions == nil {
		sp.Definitions = make(spec.Definitions)
	}
	sp.Definitions[name] = schema
	pg := schemaGenContext{
		Path:                       "",
		Name:                       name,
		Receiver:                   sg.Receiver,
		IndexVar:                   "i",
		ValueExpr:                  sg.Receiver,
		Schema:                     schema,
		Required:                   false,
		Named:                      true,
		ExtraSchemas:               make(map[string]GenSchema),
		Discrimination:             sg.Discrimination,
		Container:                  sg.Container,
		IncludeValidator:           sg.IncludeValidator,
		IncludeModel:               sg.IncludeModel,
		StrictAdditionalProperties: sg.StrictAdditionalProperties,
		StructTags:                 sg.StructTags,
	}
	if schema.Ref.String() == "" {
		// non-ref schemas resolve against the new model name
		pg.TypeResolver = sg.TypeResolver.NewWithModelName(name)
	}
	pg.GenSchema.IsVirtual = true

	sg.ExtraSchemas[name] = pg.GenSchema
	return &pg
}
+
// buildArray produces the generation schema for an array (Items holds a single
// schema). Anonymous complex element objects are promoted to named definitions
// referred to by $ref, and element validations are lifted onto the array.
func (sg *schemaGenContext) buildArray() error {
	tpe, err := sg.TypeResolver.ResolveSchema(sg.Schema.Items.Schema, true, false)
	if err != nil {
		return err
	}

	// check if the element is a complex object, if so generate a new type for it
	if tpe.IsComplexObject && tpe.IsAnonymous {
		pg := sg.makeNewStruct(sg.makeRefName()+" items"+strconv.Itoa(sg.Index), *sg.Schema.Items.Schema)
		if err := pg.makeGenSchema(); err != nil {
			return err
		}
		sg.MergeResult(pg, false)
		sg.ExtraSchemas[pg.Name] = pg.GenSchema
		sg.Schema.Items.Schema = spec.RefProperty("#/definitions/" + pg.Name)
		sg.IsVirtual = true
		// rebuild this schema now that items point to the new definition
		return sg.makeGenSchema()
	}

	// create the generation schema for items
	elProp := sg.NewSliceBranch(sg.Schema.Items.Schema)

	// when building a slice of maps, the map item is not required
	// items from maps of aliased or nullable type remain required

	// NOTE(fredbi): since this is reset below, this Required = true serves the obscure purpose
	// of indirectly lifting validations from the slice. This is carried out differently now.
	// elProp.Required = true

	if err := elProp.makeGenSchema(); err != nil {
		return err
	}

	sg.MergeResult(elProp, false)

	sg.GenSchema.IsBaseType = elProp.GenSchema.IsBaseType
	sg.GenSchema.ItemsEnum = elProp.GenSchema.Enum
	elProp.GenSchema.Suffix = "Items"

	// nullable elements are rendered as a slice of pointers
	elProp.GenSchema.IsNullable = tpe.IsNullable && !tpe.HasDiscriminator
	if elProp.GenSchema.IsNullable {
		sg.GenSchema.GoType = "[]*" + elProp.GenSchema.GoType
	} else {
		sg.GenSchema.GoType = "[]" + elProp.GenSchema.GoType
	}

	sg.GenSchema.IsArray = true

	schemaCopy := elProp.GenSchema

	schemaCopy.Required = false

	// validations of items
	// include format validation, excluding binary and base64 format validation
	hv := hasValidations(sg.Schema.Items.Schema, false) || hasFormatValidation(schemaCopy.resolvedType)

	// base types of polymorphic types must be validated
	// NOTE: IsNullable is not useful to figure out a validation: we use Refed and IsAliased below instead
	if hv || elProp.GenSchema.IsBaseType {
		schemaCopy.HasValidations = true
	}

	if (elProp.Schema.Ref.String() != "" || elProp.GenSchema.IsAliased) && !(elProp.GenSchema.IsInterface || elProp.GenSchema.IsStream) {
		schemaCopy.HasValidations = true
	}

	// lift validations
	sg.GenSchema.HasValidations = sg.GenSchema.HasValidations || schemaCopy.HasValidations
	sg.GenSchema.HasSliceValidations = sg.Schema.Validations().HasArrayValidations() || sg.Schema.Validations().HasEnum()

	// prevents bubbling custom formatter flag
	sg.GenSchema.IsCustomFormatter = false

	sg.GenSchema.Items = &schemaCopy
	if sg.Named {
		sg.GenSchema.AliasedType = sg.GenSchema.GoType
	}

	return nil
}
+
// buildItems produces the generation schema for Items: either a single schema
// (a regular array, delegated to buildArray) or a list of schemas (a tuple).
// Anonymous tuples are promoted to a named Tuple definition referred to by $ref.
func (sg *schemaGenContext) buildItems() error {
	if sg.Schema.Items == nil {
		// in swagger, arrays MUST have an items schema
		return nil
	}

	// in Items spec, we have either Schema (array) or Schemas (tuple)
	presentsAsSingle := sg.Schema.Items.Schema != nil
	if presentsAsSingle && sg.Schema.AdditionalItems != nil { // unsure if this is a valid or invalid schema
		return fmt.Errorf("single schema (%s) can't have additional items", sg.Name)
	}
	if presentsAsSingle {
		return sg.buildArray()
	}

	// This is a tuple, build a new model that represents this
	if sg.Named {
		sg.GenSchema.Name = sg.Name
		sg.GenSchema.GoType = sg.TypeResolver.goTypeName(sg.Name)
		for i, sch := range sg.Schema.Items.Schemas {
			s := sch
			elProp := sg.NewTupleElement(&s, i)

			if s.Ref.String() == "" {
				tpe, err := sg.TypeResolver.ResolveSchema(&s, s.Ref.String() == "", true)
				if err != nil {
					return err
				}
				if tpe.IsComplexObject && tpe.IsAnonymous {
					// if the tuple element is an anonymous complex object, build a new type for it
					pg := sg.makeNewStruct(sg.makeRefName()+" Items"+strconv.Itoa(i), s)
					if err := pg.makeGenSchema(); err != nil {
						return err
					}
					elProp.Schema = *spec.RefProperty("#/definitions/" + pg.Name)
					elProp.MergeResult(pg, false)
					elProp.ExtraSchemas[pg.Name] = pg.GenSchema
				}
			}

			if err := elProp.makeGenSchema(); err != nil {
				return err
			}
			if elProp.GenSchema.IsInterface || elProp.GenSchema.IsStream {
				// interface and stream elements are not validated
				elProp.GenSchema.HasValidations = false
			}
			sg.MergeResult(elProp, false)

			// tuple elements are rendered as positional properties p0, p1, ...
			elProp.GenSchema.Name = "p" + strconv.Itoa(i)
			sg.GenSchema.Properties = append(sg.GenSchema.Properties, elProp.GenSchema)
			sg.GenSchema.IsTuple = true
		}
		return nil
	}

	// for an anonymous object, first build the new object
	// and then replace the current one with a $ref to the
	// new tuple object
	var sch spec.Schema
	sch.Typed("object", "")
	sch.Properties = make(map[string]spec.Schema, len(sg.Schema.Items.Schemas))
	for i, v := range sg.Schema.Items.Schemas {
		sch.Required = append(sch.Required, "P"+strconv.Itoa(i))
		sch.Properties["P"+strconv.Itoa(i)] = v
	}
	sch.AdditionalItems = sg.Schema.AdditionalItems
	tup := sg.makeNewStruct(sg.GenSchema.Name+"Tuple"+strconv.Itoa(sg.Index), sch)
	tup.IsTuple = true
	if err := tup.makeGenSchema(); err != nil {
		return err
	}
	tup.GenSchema.IsTuple = true
	tup.GenSchema.IsComplexObject = false
	tup.GenSchema.Title = tup.GenSchema.Name + " a representation of an anonymous Tuple type"
	tup.GenSchema.Description = ""
	sg.ExtraSchemas[tup.Name] = tup.GenSchema

	sg.Schema = *spec.RefProperty("#/definitions/" + tup.Name)
	if err := sg.makeGenSchema(); err != nil {
		return err
	}
	sg.MergeResult(tup, false)
	return nil
}
+
+func (sg *schemaGenContext) buildAdditionalItems() error {
+ wantsAdditionalItems :=
+ sg.Schema.AdditionalItems != nil &&
+ (sg.Schema.AdditionalItems.Allows || sg.Schema.AdditionalItems.Schema != nil)
+
+ sg.GenSchema.HasAdditionalItems = wantsAdditionalItems
+ if wantsAdditionalItems {
+ // check if the element is a complex object, if so generate a new type for it
+ tpe, err := sg.TypeResolver.ResolveSchema(sg.Schema.AdditionalItems.Schema, true, true)
+ if err != nil {
+ return err
+ }
+ if tpe.IsComplexObject && tpe.IsAnonymous {
+ pg := sg.makeNewStruct(sg.makeRefName()+" Items", *sg.Schema.AdditionalItems.Schema)
+ if err := pg.makeGenSchema(); err != nil {
+ return err
+ }
+ sg.Schema.AdditionalItems.Schema = spec.RefProperty("#/definitions/" + pg.Name)
+ pg.GenSchema.HasValidations = true
+ sg.MergeResult(pg, false)
+ sg.ExtraSchemas[pg.Name] = pg.GenSchema
+ }
+
+ it := sg.NewAdditionalItems(sg.Schema.AdditionalItems.Schema)
+ // if AdditionalItems are themselves arrays, bump the index var
+ if tpe.IsArray {
+ it.IndexVar += "i"
+ }
+
+ if tpe.IsInterface {
+ it.Untyped = true
+ }
+
+ if err := it.makeGenSchema(); err != nil {
+ return err
+ }
+
+ // lift validations when complex is not anonymous or ref'ed
+ if (tpe.IsComplexObject || it.Schema.Ref.String() != "") && !(tpe.IsInterface || tpe.IsStream) {
+ it.GenSchema.HasValidations = true
+ }
+
+ sg.MergeResult(it, true)
+ sg.GenSchema.AdditionalItems = &it.GenSchema
+ }
+ return nil
+}
+
+func (sg *schemaGenContext) buildXMLNameWithTags() error {
+ // render some "xml" struct tag under one the following conditions:
+ // - consumes/produces in spec contains xml
+ // - struct tags CLI option contains xml
+ // - XML object present in spec for this schema
+ if sg.WithXML || swag.ContainsStrings(sg.StructTags, "xml") || sg.Schema.XML != nil {
+ sg.GenSchema.XMLName = sg.Name
+
+ if sg.Schema.XML != nil {
+ if sg.Schema.XML.Name != "" {
+ sg.GenSchema.XMLName = sg.Schema.XML.Name
+ }
+ if sg.Schema.XML.Attribute {
+ sg.GenSchema.XMLName += ",attr"
+ }
+ }
+ }
+ return nil
+}
+
// shortCircuitNamedRef handles a named schema that is itself a $ref.
// It returns true when processing was completed here and the caller
// should stop (the schema has been rendered as an alias or an embedded
// composition), false to continue with regular processing.
func (sg *schemaGenContext) shortCircuitNamedRef() (bool, error) {
	// This if block ensures that a struct gets
	// rendered with the ref as embedded ref.
	//
	// NOTE: this assumes that all $ref point to a definition,
	// i.e. the spec is canonical, as guaranteed by minimal flattening.
	//
	// TODO: RefHandled is actually set nowhere
	if sg.RefHandled || !sg.Named || sg.Schema.Ref.String() == "" {
		return false, nil
	}
	debugLogAsJSON("short circuit named ref: %q", sg.Schema.Ref.String(), sg.Schema)

	// Simple aliased types (arrays, maps and primitives)
	//
	// Before deciding to make a struct with a composition branch (below),
	// check if the $ref points to a simple type or polymorphic (base) type.
	//
	// If this is the case, just realias this simple type, without creating a struct.
	asch, era := analysis.Schema(analysis.SchemaOpts{
		Root:     sg.TypeResolver.Doc.Spec(),
		BasePath: sg.TypeResolver.Doc.SpecFilePath(),
		Schema:   &sg.Schema,
	})
	if era != nil {
		return false, era
	}

	if asch.IsArray || asch.IsMap || asch.IsKnownType || asch.IsBaseType {
		tpx, ers := sg.TypeResolver.ResolveSchema(&sg.Schema, false, true)
		if ers != nil {
			return false, ers
		}
		// build the resolved type for the alias from scratch,
		// copying over only the relevant flags from the resolution
		tpe := resolvedType{}
		tpe.IsMap = asch.IsMap
		tpe.IsArray = asch.IsArray
		tpe.IsPrimitive = asch.IsKnownType

		tpe.IsAliased = true
		tpe.AliasedType = ""
		tpe.IsComplexObject = false
		tpe.IsAnonymous = false
		tpe.IsCustomFormatter = false
		tpe.IsBaseType = tpx.IsBaseType

		tpe.GoType = sg.TypeResolver.goTypeName(path.Base(sg.Schema.Ref.String()))
		tpe.Pkg = sg.TypeResolver.definitionPkg

		tpe.IsNullable = tpx.IsNullable // TODO
		tpe.IsInterface = tpx.IsInterface
		tpe.IsStream = tpx.IsStream
		tpe.IsEmbedded = tpx.IsEmbedded

		tpe.SwaggerType = tpx.SwaggerType
		sch := spec.Schema{}
		pg := sg.makeNewStruct(sg.Name, sch)
		if err := pg.makeGenSchema(); err != nil {
			return true, err
		}
		sg.MergeResult(pg, true)
		sg.GenSchema = pg.GenSchema
		sg.GenSchema.resolvedType = tpe
		sg.GenSchema.resolvedType.IsSuperAlias = true
		sg.GenSchema.IsBaseType = tpe.IsBaseType

		return true, nil
	}

	// Aliased object: use golang struct composition.
	// This is rendered as a struct with type field, i.e. :
	// Alias struct {
	//		AliasedType
	// }
	nullableOverride := sg.GenSchema.IsNullable

	tpe := resolvedType{}
	tpe.GoType = sg.TypeResolver.goTypeName(sg.Name)
	tpe.Pkg = sg.TypeResolver.definitionPkg
	tpe.SwaggerType = "object"
	tpe.IsComplexObject = true
	tpe.IsMap = false
	tpe.IsArray = false
	tpe.IsAnonymous = false
	tpe.IsNullable = sg.TypeResolver.isNullable(&sg.Schema)

	item := sg.NewCompositionBranch(sg.Schema, 0)
	if err := item.makeGenSchema(); err != nil {
		return true, err
	}
	sg.GenSchema.resolvedType = tpe
	sg.GenSchema.IsNullable = sg.GenSchema.IsNullable || nullableOverride
	// prevent format from bubbling up in composed type
	item.GenSchema.IsCustomFormatter = false

	sg.MergeResult(item, true)
	sg.GenSchema.AllOf = append(sg.GenSchema.AllOf, item.GenSchema)
	return true, nil
}
+
// liftSpecialAllOf attempts to simplify the rendering of allOf constructs by
// lifting simple things into the current schema: when the allOf holds exactly
// one schema-bearing member (the others typically only carrying x-isnullable),
// that member replaces the current schema.
func (sg *schemaGenContext) liftSpecialAllOf() error {
	// if there is only a $ref or a primitive and an x-isnullable schema then this is a nullable pointer
	// so this should not compose several objects, just 1
	// if there is a ref with a discriminator then we look for x-class on the current definition to know
	// the value of the discriminator to instantiate the class
	if len(sg.Schema.AllOf) < 2 {
		return nil
	}
	var seenSchema int
	var seenNullable bool
	var schemaToLift spec.Schema

	for _, schema := range sg.Schema.AllOf {
		sch := schema
		tpe, err := sg.TypeResolver.ResolveSchema(&sch, true, true)
		if err != nil {
			return err
		}
		if sg.TypeResolver.isNullable(&sch) {
			seenNullable = true
		}
		if len(sch.Type) > 0 || len(sch.Properties) > 0 || sch.Ref.GetURL() != nil || len(sch.AllOf) > 0 {
			seenSchema++
			if seenSchema > 1 {
				// won't do anything if several candidates for a lift
				break
			}
			if (!tpe.IsAnonymous && tpe.IsComplexObject) || tpe.IsPrimitive {
				// lifting complex objects here results in inlined structs in the model
				schemaToLift = sch
			}
		}
	}

	if seenSchema == 1 {
		// when there only a single schema to lift in allOf, replace the schema by its allOf definition
		debugLog("lifted schema in allOf for %s", sg.Name)
		sg.Schema = schemaToLift
		sg.GenSchema.IsNullable = seenNullable
	}
	return nil
}
+
// buildAliased flags the generated schema as aliased when its go type differs
// from the natural representation of its category (primitive, interface, map
// or array).
//
// NOTE: the IsPrimitive/IsInterface/IsMap/IsArray checks are deliberately kept
// as independent ifs (not a switch): the last matching one wins.
func (sg *schemaGenContext) buildAliased() error {
	if !sg.GenSchema.IsPrimitive && !sg.GenSchema.IsMap && !sg.GenSchema.IsArray && !sg.GenSchema.IsInterface {
		return nil
	}

	if sg.GenSchema.IsPrimitive {
		if sg.GenSchema.SwaggerType == "string" && sg.GenSchema.SwaggerFormat == "" {
			// a plain string type is aliased when the go type is not "string"
			sg.GenSchema.IsAliased = sg.GenSchema.GoType != sg.GenSchema.SwaggerType
		}
		if sg.GenSchema.IsNullable && sg.Named {
			// named nullable primitives are not rendered as pointers
			sg.GenSchema.IsNullable = false
		}
	}

	if sg.GenSchema.IsInterface {
		sg.GenSchema.IsAliased = sg.GenSchema.GoType != iface
	}

	if sg.GenSchema.IsMap {
		sg.GenSchema.IsAliased = !strings.HasPrefix(sg.GenSchema.GoType, "map[")
	}

	if sg.GenSchema.IsArray {
		sg.GenSchema.IsAliased = !strings.HasPrefix(sg.GenSchema.GoType, "[]")
	}
	return nil
}
+
+func (sg schemaGenContext) makeRefName() string {
+ // figure out a longer name for deconflicting anonymous models.
+ // This is used when makeNewStruct() is followed by the creation of a new ref to definitions
+ if sg.UseContainerInName && sg.Container != sg.Name {
+ return sg.Container + swag.ToGoName(sg.Name)
+ }
+ return sg.Name
+}
+
// GoName returns the go name for this schema, honoring the x-go-name extension
// when present (see goName).
func (sg *schemaGenContext) GoName() string {
	return goName(&sg.Schema, sg.Name)
}
+
+func goName(sch *spec.Schema, orig string) string {
+ name, _ := sch.Extensions.GetString(xGoName)
+ if name != "" {
+ return name
+ }
+ return orig
+}
+
+func (sg *schemaGenContext) derefMapElement(outer *GenSchema, _ *GenSchema, elem *GenSchema) {
+ derefType := strings.TrimPrefix(elem.GoType, "*")
+
+ if outer.IsAliased {
+ nesting := strings.TrimSuffix(strings.TrimSuffix(outer.AliasedType, elem.GoType), "*")
+ outer.AliasedType = nesting + derefType
+ outer.GoType = derefType
+ } else {
+ nesting := strings.TrimSuffix(strings.TrimSuffix(outer.GoType, elem.GoType), "*")
+ outer.GoType = nesting + derefType
+ }
+
+ elem.GoType = derefType
+}
+
+func (sg *schemaGenContext) checkNeedsPointer(outer *GenSchema, sch *GenSchema, elem *GenSchema) {
+ derefType := strings.TrimPrefix(elem.GoType, "*")
+ switch {
+ case outer.IsAliased && !strings.HasSuffix(outer.AliasedType, "*"+derefType):
+ // override nullability of map of primitive elements: render element of aliased or anonymous map as a pointer
+ outer.AliasedType = strings.TrimSuffix(outer.AliasedType, derefType) + "*" + derefType
+ case sch != nil:
+ // nullable primitive
+ if sch.IsAnonymous && !strings.HasSuffix(outer.GoType, "*"+derefType) {
+ sch.GoType = strings.TrimSuffix(sch.GoType, derefType) + "*" + derefType
+ }
+ case outer.IsAnonymous && !strings.HasSuffix(outer.GoType, "*"+derefType):
+ outer.GoType = strings.TrimSuffix(outer.GoType, derefType) + "*" + derefType
+ }
+}
+
// buildMapOfNullable equalizes the nullability status for aliased and anonymous maps of simple things,
// with the nullability of its innermost element.
//
// NOTE: at the moment, we decide to align the type of the outer element (map) to the type of the inner element
// The opposite could be done and result in non nullable primitive elements. If we do so, the validation
// code needs to be adapted by removing IsZero() and Required() calls in codegen.
func (sg *schemaGenContext) buildMapOfNullable(sch *GenSchema) {
	outer := &sg.GenSchema
	if sch == nil {
		// start from the receiver's own schema when none is provided
		sch = outer
	}
	if sch.IsMap && (outer.IsAliased || outer.IsAnonymous) {
		// walk down the chain of AdditionalProperties
		elem := sch.AdditionalProperties
		for elem != nil {
			if elem.IsPrimitive && elem.IsNullable {
				sg.checkNeedsPointer(outer, nil, elem)
			} else if elem.IsArray {
				// override nullability of array of primitive elements:
				// render element of aliased or anonymous map as a pointer
				it := elem.Items
				for it != nil {
					switch {
					case it.IsPrimitive && it.IsNullable:
						sg.checkNeedsPointer(outer, sch, it)
					case it.IsMap:
						// recurse into maps nested inside arrays
						sg.buildMapOfNullable(it)
					case !it.IsPrimitive && !it.IsArray && it.IsComplexObject && it.IsNullable:
						// structs in map are not rendered as pointer by default
						// unless some x-nullable overrides says so
						_, forced := it.Extensions[xNullable]
						if !forced {
							_, forced = it.Extensions[xIsNullable]
						}
						if !forced {
							sg.derefMapElement(outer, sch, it)
						}
					}
					it = it.Items
				}
			}
			elem = elem.AdditionalProperties
		}
	}
}
+
+func (sg *schemaGenContext) makeGenSchema() error {
+ debugLogAsJSON("making gen schema (anon: %t, req: %t, tuple: %t) %s\n",
+ !sg.Named, sg.Required, sg.IsTuple, sg.Name, sg.Schema)
+
+ sg.GenSchema.Example = ""
+ if sg.Schema.Example != nil {
+ data, err := asJSON(sg.Schema.Example)
+ if err != nil {
+ return err
+ }
+ // Deleting the unnecessary double quotes for string types
+ // otherwise the generate spec will generate as "\"foo\""
+ sg.GenSchema.Example = strings.Trim(data, "\"")
+ }
+ sg.GenSchema.ExternalDocs = trimExternalDoc(sg.Schema.ExternalDocs)
+ sg.GenSchema.IsExported = true
+ sg.GenSchema.Path = sg.Path
+ sg.GenSchema.IndexVar = sg.IndexVar
+ sg.GenSchema.Location = body
+ sg.GenSchema.ValueExpression = sg.ValueExpr
+ sg.GenSchema.KeyVar = sg.KeyVar
+ sg.GenSchema.OriginalName = sg.Name
+ sg.GenSchema.Name = sg.GoName()
+ sg.GenSchema.Title = sg.Schema.Title
+ sg.GenSchema.Description = trimBOM(sg.Schema.Description)
+ sg.GenSchema.ReceiverName = sg.Receiver
+ sg.GenSchema.sharedValidations = sg.schemaValidations()
+ sg.GenSchema.ReadOnly = sg.Schema.ReadOnly
+ sg.GenSchema.IncludeValidator = sg.IncludeValidator
+ sg.GenSchema.IncludeModel = sg.IncludeModel
+ sg.GenSchema.StrictAdditionalProperties = sg.StrictAdditionalProperties
+ sg.GenSchema.Default = sg.Schema.Default
+ sg.GenSchema.StructTags = sg.StructTags
+ sg.GenSchema.ExtraImports = make(map[string]string)
+
+ var err error
+ returns, err := sg.shortCircuitNamedRef()
+ if err != nil {
+ return err
+ }
+ if returns {
+ return nil
+ }
+ debugLogAsJSON("after short circuit named ref", sg.Schema)
+
+ if e := sg.liftSpecialAllOf(); e != nil {
+ return e
+ }
+ nullableOverride := sg.GenSchema.IsNullable
+ debugLogAsJSON("after lifting special all of", sg.Schema)
+
+ if sg.Container == "" {
+ sg.Container = sg.GenSchema.Name
+ }
+ if e := sg.buildAllOf(); e != nil {
+ return e
+ }
+
+ var tpe resolvedType
+ if sg.Untyped {
+ tpe, err = sg.TypeResolver.ResolveSchema(nil, !sg.Named, sg.IsTuple || sg.Required || sg.GenSchema.Required)
+ } else {
+ tpe, err = sg.TypeResolver.ResolveSchema(&sg.Schema, !sg.Named, sg.IsTuple || sg.Required || sg.GenSchema.Required)
+ }
+ if err != nil {
+ return err
+ }
+
+ debugLog("gschema rrequired: %t, nullable: %t", sg.GenSchema.Required, sg.GenSchema.IsNullable)
+ tpe.IsNullable = tpe.IsNullable || nullableOverride
+ sg.GenSchema.resolvedType = tpe
+ sg.GenSchema.IsBaseType = tpe.IsBaseType
+ sg.GenSchema.HasDiscriminator = tpe.HasDiscriminator
+
+ // include format validations, excluding binary
+ sg.GenSchema.HasValidations = sg.GenSchema.HasValidations || hasFormatValidation(tpe)
+
+ // include context validations
+ sg.GenSchema.HasContextValidations = sg.GenSchema.HasContextValidations || hasContextValidations(&sg.Schema) && !tpe.IsInterface && !tpe.IsStream && !tpe.SkipExternalValidation
+
+ // usage of a polymorphic base type is rendered with getter funcs on private properties.
+ // In the case of aliased types, the value expression remains unchanged to the receiver.
+ if tpe.IsArray && tpe.ElemType != nil && tpe.ElemType.IsBaseType && sg.GenSchema.ValueExpression != sg.GenSchema.ReceiverName {
+ sg.GenSchema.ValueExpression += asMethod
+ }
+
+ if tpe.IsExternal { // anonymous external types
+ extType, pkg, alias := sg.TypeResolver.knownDefGoType(sg.GenSchema.Name, sg.Schema, sg.TypeResolver.goTypeName)
+ if pkg != "" && alias != "" {
+ sg.GenSchema.ExtraImports[alias] = pkg
+ }
+
+ if !tpe.IsEmbedded {
+ sg.GenSchema.resolvedType = tpe
+ sg.GenSchema.Required = sg.Required
+ // assume we validate everything but interface and io.Reader - validation may be disabled by using the noValidation hint
+ sg.GenSchema.HasValidations = !(tpe.IsInterface || tpe.IsStream || tpe.SkipExternalValidation)
+ sg.GenSchema.IsAliased = sg.GenSchema.HasValidations
+
+ log.Printf("INFO: type %s is external, with inferred spec type %s, referred to as %s", sg.GenSchema.Name, sg.GenSchema.GoType, extType)
+ sg.GenSchema.GoType = extType
+ sg.GenSchema.AliasedType = extType
+ return nil
+ }
+ // TODO: case for embedded types as anonymous definitions
+ return fmt.Errorf("ERROR: inline definitions embedded types are not supported")
+ }
+
+ debugLog("gschema nullable: %t", sg.GenSchema.IsNullable)
+ if e := sg.buildAdditionalProperties(); e != nil {
+ return e
+ }
+
+ // rewrite value expression from top-down
+ cur := &sg.GenSchema
+ for cur.AdditionalProperties != nil {
+ cur.AdditionalProperties.ValueExpression = cur.ValueExpression + "[" + cur.AdditionalProperties.KeyVar + "]"
+ cur = cur.AdditionalProperties
+ }
+
+ prev := sg.GenSchema
+ if sg.Untyped {
+ debugLogAsJSON("untyped resolve:%t", sg.Named || sg.IsTuple || sg.Required || sg.GenSchema.Required, sg.Schema)
+ tpe, err = sg.TypeResolver.ResolveSchema(nil, !sg.Named, sg.Named || sg.IsTuple || sg.Required || sg.GenSchema.Required)
+ } else {
+ debugLogAsJSON("typed resolve, isAnonymous(%t), n: %t, t: %t, sgr: %t, sr: %t, isRequired(%t), BaseType(%t)",
+ !sg.Named, sg.Named, sg.IsTuple, sg.Required, sg.GenSchema.Required,
+ sg.Named || sg.IsTuple || sg.Required || sg.GenSchema.Required, sg.GenSchema.IsBaseType, sg.Schema)
+ tpe, err = sg.TypeResolver.ResolveSchema(&sg.Schema, !sg.Named, sg.Named || sg.IsTuple || sg.Required || sg.GenSchema.Required)
+ }
+ if err != nil {
+ return err
+ }
+ otn := tpe.IsNullable // for debug only
+ tpe.IsNullable = tpe.IsNullable || nullableOverride
+ sg.GenSchema.resolvedType = tpe
+ sg.GenSchema.IsComplexObject = prev.IsComplexObject
+ sg.GenSchema.IsMap = prev.IsMap
+ sg.GenSchema.IsAdditionalProperties = prev.IsAdditionalProperties
+ sg.GenSchema.IsBaseType = sg.GenSchema.HasDiscriminator
+
+ debugLogAsJSON("gschema nnullable:IsNullable:%t,resolver.IsNullable:%t,nullableOverride:%t",
+ sg.GenSchema.IsNullable, otn, nullableOverride, sg.Schema)
+ if err := sg.buildProperties(); err != nil {
+ return err
+ }
+
+ if err := sg.buildXMLNameWithTags(); err != nil {
+ return err
+ }
+
+ if err := sg.buildAdditionalItems(); err != nil {
+ return err
+ }
+
+ if err := sg.buildItems(); err != nil {
+ return err
+ }
+
+ if err := sg.buildAliased(); err != nil {
+ return err
+ }
+
+ sg.buildMapOfNullable(nil)
+
+ // extra serializers & interfaces
+
+ // generate MarshalBinary for:
+ // - tuple
+ // - struct
+ // - map
+ // - aliased primitive of a formatter type which is not a stringer
+ //
+ // but not for:
+ // - interface{}
+ // - io.Reader
+ gs := sg.GenSchema
+ sg.GenSchema.WantsMarshalBinary = !(gs.IsInterface || gs.IsStream || gs.IsBaseType) &&
+ (gs.IsTuple || gs.IsComplexObject || gs.IsAdditionalProperties || (gs.IsPrimitive && gs.IsAliased && gs.IsCustomFormatter && !strings.Contains(gs.Zero(), `("`)))
+
+ debugLog("finished gen schema for %q", sg.Name)
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/operation.go b/vendor/github.com/go-swagger/go-swagger/generator/operation.go
new file mode 100644
index 000000000..8f4b8b2f6
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/operation.go
@@ -0,0 +1,1303 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+type respSort struct {
+ Code int
+ Response spec.Response
+}
+
+type responses []respSort
+
+func (s responses) Len() int { return len(s) }
+func (s responses) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
+func (s responses) Less(i, j int) bool { return s[i].Code < s[j].Code }
+
+// sortedResponses produces a sorted list of responses.
+// TODO: this is redundant with the definition given in struct.go
+func sortedResponses(input map[int]spec.Response) responses {
+ var res responses
+ for k, v := range input {
+ if k > 0 {
+ res = append(res, respSort{k, v})
+ }
+ }
+ sort.Sort(res)
+ return res
+}
+
+// GenerateServerOperation generates a parameter model, parameter validator, http handler implementations for a given operation.
+//
+// It also generates an operation handler interface that uses the parameter model for handling a valid request.
+// Allows for specifying a list of tags to include only certain tags for the generation
+func GenerateServerOperation(operationNames []string, opts *GenOpts) error {
+ if err := opts.CheckOpts(); err != nil {
+ return err
+ }
+
+ if err := opts.setTemplates(); err != nil {
+ return err
+ }
+
+ specDoc, analyzed, err := opts.analyzeSpec()
+ if err != nil {
+ return err
+ }
+
+ ops := gatherOperations(analyzed, operationNames)
+
+ if len(ops) == 0 {
+ return errors.New("no operations were selected")
+ }
+
+ for operationName, opRef := range ops {
+ method, path, operation := opRef.Method, opRef.Path, opRef.Op
+
+ serverPackage := opts.LanguageOpts.ManglePackagePath(opts.ServerPackage, defaultServerTarget)
+ generator := operationGenerator{
+ Name: operationName,
+ Method: method,
+ Path: path,
+ BasePath: specDoc.BasePath(),
+ APIPackage: opts.LanguageOpts.ManglePackagePath(opts.APIPackage, defaultOperationsTarget),
+ ModelsPackage: opts.LanguageOpts.ManglePackagePath(opts.ModelPackage, defaultModelsTarget),
+ ClientPackage: opts.LanguageOpts.ManglePackagePath(opts.ClientPackage, defaultClientTarget),
+ ServerPackage: serverPackage,
+ Operation: *operation,
+ SecurityRequirements: analyzed.SecurityRequirementsFor(operation),
+ SecurityDefinitions: analyzed.SecurityDefinitionsFor(operation),
+ Principal: opts.PrincipalAlias(),
+ Target: filepath.Join(opts.Target, filepath.FromSlash(serverPackage)),
+ Base: opts.Target,
+ Tags: opts.Tags,
+ IncludeHandler: opts.IncludeHandler,
+ IncludeParameters: opts.IncludeParameters,
+ IncludeResponses: opts.IncludeResponses,
+ IncludeValidator: opts.IncludeValidator,
+ DumpData: opts.DumpData,
+ DefaultScheme: opts.DefaultScheme,
+ DefaultProduces: opts.DefaultProduces,
+ DefaultConsumes: opts.DefaultConsumes,
+ Doc: specDoc,
+ Analyzed: analyzed,
+ GenOpts: opts,
+ }
+ if err := generator.Generate(); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+type operationGenerator struct {
+ Authorized bool
+ IncludeHandler bool
+ IncludeParameters bool
+ IncludeResponses bool
+ IncludeValidator bool
+ DumpData bool
+
+ Principal string
+ Target string
+ Base string
+ Name string
+ Method string
+ Path string
+ BasePath string
+ APIPackage string
+ ModelsPackage string
+ ServerPackage string
+ ClientPackage string
+ Operation spec.Operation
+ SecurityRequirements [][]analysis.SecurityRequirement
+ SecurityDefinitions map[string]spec.SecurityScheme
+ Tags []string
+ DefaultScheme string
+ DefaultProduces string
+ DefaultConsumes string
+ Doc *loads.Document
+ Analyzed *analysis.Spec
+ GenOpts *GenOpts
+}
+
+// Generate a single operation
+func (o *operationGenerator) Generate() error {
+
+ defaultImports := o.GenOpts.defaultImports()
+
+ apiPackage := o.GenOpts.LanguageOpts.ManglePackagePath(o.GenOpts.APIPackage, defaultOperationsTarget)
+ imports := o.GenOpts.initImports(
+ filepath.Join(o.GenOpts.LanguageOpts.ManglePackagePath(o.GenOpts.ServerPackage, defaultServerTarget), apiPackage))
+
+ bldr := codeGenOpBuilder{
+ ModelsPackage: o.ModelsPackage,
+ Principal: o.GenOpts.PrincipalAlias(),
+ Target: o.Target,
+ DefaultImports: defaultImports,
+ Imports: imports,
+ DefaultScheme: o.DefaultScheme,
+ Doc: o.Doc,
+ Analyzed: o.Analyzed,
+ BasePath: o.BasePath,
+ GenOpts: o.GenOpts,
+ Name: o.Name,
+ Operation: o.Operation,
+ Method: o.Method,
+ Path: o.Path,
+ IncludeValidator: o.IncludeValidator,
+ APIPackage: o.APIPackage, // defaults to main operations package
+ DefaultProduces: o.DefaultProduces,
+ DefaultConsumes: o.DefaultConsumes,
+ Authed: len(o.Analyzed.SecurityRequirementsFor(&o.Operation)) > 0,
+ Security: o.Analyzed.SecurityRequirementsFor(&o.Operation),
+ SecurityDefinitions: o.Analyzed.SecurityDefinitionsFor(&o.Operation),
+ RootAPIPackage: o.GenOpts.LanguageOpts.ManglePackageName(o.ServerPackage, defaultServerTarget),
+ }
+
+ _, tags, _ := bldr.analyzeTags()
+
+ op, err := bldr.MakeOperation()
+ if err != nil {
+ return err
+ }
+
+ op.Tags = tags
+ operations := make(GenOperations, 0, 1)
+ operations = append(operations, op)
+ sort.Sort(operations)
+
+ for _, pp := range operations {
+ op := pp
+ if o.GenOpts.DumpData {
+ _ = dumpData(swag.ToDynamicJSON(op))
+ continue
+ }
+ if err := o.GenOpts.renderOperation(&op); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+type codeGenOpBuilder struct {
+ Authed bool
+ IncludeValidator bool
+
+ Name string
+ Method string
+ Path string
+ BasePath string
+ APIPackage string
+ APIPackageAlias string
+ RootAPIPackage string
+ ModelsPackage string
+ Principal string
+ Target string
+ Operation spec.Operation
+ Doc *loads.Document
+ PristineDoc *loads.Document
+ Analyzed *analysis.Spec
+ DefaultImports map[string]string
+ Imports map[string]string
+ DefaultScheme string
+ DefaultProduces string
+ DefaultConsumes string
+ Security [][]analysis.SecurityRequirement
+ SecurityDefinitions map[string]spec.SecurityScheme
+ ExtraSchemas map[string]GenSchema
+ GenOpts *GenOpts
+}
+
+// paramMappings yields a map of safe parameter names for an operation
+func paramMappings(params map[string]spec.Parameter) (map[string]map[string]string, string) {
+ idMapping := map[string]map[string]string{
+ "query": make(map[string]string, len(params)),
+ "path": make(map[string]string, len(params)),
+ "formData": make(map[string]string, len(params)),
+ "header": make(map[string]string, len(params)),
+ "body": make(map[string]string, len(params)),
+ }
+
+ // In order to avoid unstable generation, adopt same naming convention
+ // for all parameters with same name across locations.
+ seenIds := make(map[string]interface{}, len(params))
+ for id, p := range params {
+ if val, ok := seenIds[p.Name]; ok {
+ previous := val.(struct{ id, in string })
+ idMapping[p.In][p.Name] = swag.ToGoName(id)
+ // rewrite the previously found one
+ idMapping[previous.in][p.Name] = swag.ToGoName(previous.id)
+ } else {
+ idMapping[p.In][p.Name] = swag.ToGoName(p.Name)
+ }
+ seenIds[strings.ToLower(idMapping[p.In][p.Name])] = struct{ id, in string }{id: id, in: p.In}
+ }
+
+ // pick a deconflicted private name for timeout for this operation
+ timeoutName := renameTimeout(seenIds, "timeout")
+
+ return idMapping, timeoutName
+}
+
+// renameTimeout renames the variable in use by client template to avoid conflicting
+// with param names.
+//
+// NOTE: this merely protects the timeout field in the client parameter struct,
+// fields "Context" and "HTTPClient" remain exposed to name conflicts.
+func renameTimeout(seenIds map[string]interface{}, timeoutName string) string {
+ if seenIds == nil {
+ return timeoutName
+ }
+ current := strings.ToLower(timeoutName)
+ if _, ok := seenIds[current]; !ok {
+ return timeoutName
+ }
+ var next string
+ switch current {
+ case "timeout":
+ next = "requestTimeout"
+ case "requesttimeout":
+ next = "httpRequestTimeout"
+ case "httprequesttimeout":
+ next = "swaggerTimeout"
+ case "swaggertimeout":
+ next = "operationTimeout"
+ case "operationtimeout":
+ next = "opTimeout"
+ case "optimeout":
+ next = "operTimeout"
+ default:
+ next = timeoutName + "1"
+ }
+ return renameTimeout(seenIds, next)
+}
+
+func (b *codeGenOpBuilder) MakeOperation() (GenOperation, error) {
+ debugLog("[%s %s] parsing operation (id: %q)", b.Method, b.Path, b.Operation.ID)
+ // NOTE: we assume flatten is enabled by default (i.e. complex constructs are resolved from the models package),
+ // but do not assume the spec is necessarily fully flattened (i.e. all schemas moved to definitions).
+ //
+ // Fully flattened means that all complex constructs are present as
+ // definitions and models produced accordingly in ModelsPackage,
+ // whereas minimal flatten simply ensures that there are no weird $ref's in the spec.
+ //
+ // When some complex anonymous constructs are specified, extra schemas are produced in the operations package.
+ //
+ // In all cases, resetting definitions to the _original_ (untransformed) spec is not an option:
+ // we take from there the spec possibly already transformed by the GenDefinitions stage.
+ resolver := newTypeResolver(b.GenOpts.LanguageOpts.ManglePackageName(b.ModelsPackage, defaultModelsTarget), b.DefaultImports[b.ModelsPackage], b.Doc)
+ receiver := "o"
+
+ operation := b.Operation
+ var params, qp, pp, hp, fp GenParameters
+ var hasQueryParams, hasPathParams, hasHeaderParams, hasFormParams, hasFileParams, hasFormValueParams, hasBodyParams bool
+ paramsForOperation := b.Analyzed.ParamsFor(b.Method, b.Path)
+
+ idMapping, timeoutName := paramMappings(paramsForOperation)
+
+ for _, p := range paramsForOperation {
+ cp, err := b.MakeParameter(receiver, resolver, p, idMapping)
+
+ if err != nil {
+ return GenOperation{}, err
+ }
+ if cp.IsQueryParam() {
+ hasQueryParams = true
+ qp = append(qp, cp)
+ }
+ if cp.IsFormParam() {
+ if p.Type == file {
+ hasFileParams = true
+ }
+ if p.Type != file {
+ hasFormValueParams = true
+ }
+ hasFormParams = true
+ fp = append(fp, cp)
+ }
+ if cp.IsPathParam() {
+ hasPathParams = true
+ pp = append(pp, cp)
+ }
+ if cp.IsHeaderParam() {
+ hasHeaderParams = true
+ hp = append(hp, cp)
+ }
+ if cp.IsBodyParam() {
+ hasBodyParams = true
+ }
+ params = append(params, cp)
+ }
+ sort.Sort(params)
+ sort.Sort(qp)
+ sort.Sort(pp)
+ sort.Sort(hp)
+ sort.Sort(fp)
+
+ var srs responses
+ if operation.Responses != nil {
+ srs = sortedResponses(operation.Responses.StatusCodeResponses)
+ }
+ responses := make([]GenResponse, 0, len(srs))
+ var defaultResponse *GenResponse
+ var successResponses []GenResponse
+ if operation.Responses != nil {
+ for _, v := range srs {
+ name, ok := v.Response.Extensions.GetString(xGoName)
+ if !ok {
+ // look for name of well-known codes
+ name = runtime.Statuses[v.Code]
+ if name == "" {
+ // non-standard codes deserve some name
+ name = fmt.Sprintf("Status %d", v.Code)
+ }
+ }
+ name = swag.ToJSONName(b.Name + " " + name)
+ isSuccess := v.Code/100 == 2
+ gr, err := b.MakeResponse(receiver, name, isSuccess, resolver, v.Code, v.Response)
+ if err != nil {
+ return GenOperation{}, err
+ }
+ if isSuccess {
+ successResponses = append(successResponses, gr)
+ }
+ responses = append(responses, gr)
+ }
+
+ if operation.Responses.Default != nil {
+ gr, err := b.MakeResponse(receiver, b.Name+" default", false, resolver, -1, *operation.Responses.Default)
+ if err != nil {
+ return GenOperation{}, err
+ }
+ defaultResponse = &gr
+ }
+ }
+
+ // Always render a default response, even when no responses were defined
+ if operation.Responses == nil || (operation.Responses.Default == nil && len(srs) == 0) {
+ gr, err := b.MakeResponse(receiver, b.Name+" default", false, resolver, -1, spec.Response{})
+ if err != nil {
+ return GenOperation{}, err
+ }
+ defaultResponse = &gr
+ }
+
+ swsp := resolver.Doc.Spec()
+
+ schemes, extraSchemes := gatherURISchemes(swsp, operation)
+ originalSchemes := operation.Schemes
+ originalExtraSchemes := getExtraSchemes(operation.Extensions)
+
+ produces := producesOrDefault(operation.Produces, swsp.Produces, b.DefaultProduces)
+ sort.Strings(produces)
+
+ consumes := producesOrDefault(operation.Consumes, swsp.Consumes, b.DefaultConsumes)
+ sort.Strings(consumes)
+
+ var successResponse *GenResponse
+ for _, resp := range successResponses {
+ sr := resp
+ if sr.IsSuccess {
+ successResponse = &sr
+ break
+ }
+ }
+
+ var hasStreamingResponse bool
+ if defaultResponse != nil && defaultResponse.Schema != nil && defaultResponse.Schema.IsStream {
+ hasStreamingResponse = true
+ }
+
+ if !hasStreamingResponse {
+ for _, sr := range successResponses {
+ if !hasStreamingResponse && sr.Schema != nil && sr.Schema.IsStream {
+ hasStreamingResponse = true
+ break
+ }
+ }
+ }
+
+ if !hasStreamingResponse {
+ for _, r := range responses {
+ if r.Schema != nil && r.Schema.IsStream {
+ hasStreamingResponse = true
+ break
+ }
+ }
+ }
+
+ return GenOperation{
+ GenCommon: GenCommon{
+ Copyright: b.GenOpts.Copyright,
+ TargetImportPath: b.GenOpts.LanguageOpts.baseImport(b.GenOpts.Target),
+ },
+ Package: b.GenOpts.LanguageOpts.ManglePackageName(b.APIPackage, defaultOperationsTarget),
+ PackageAlias: b.APIPackageAlias,
+ RootPackage: b.RootAPIPackage,
+ Name: b.Name,
+ Method: b.Method,
+ Path: b.Path,
+ BasePath: b.BasePath,
+ Tags: operation.Tags,
+ UseTags: len(operation.Tags) > 0 && !b.GenOpts.SkipTagPackages,
+ Description: trimBOM(operation.Description),
+ ReceiverName: receiver,
+ DefaultImports: b.DefaultImports,
+ Imports: b.Imports,
+ Params: params,
+ Summary: trimBOM(operation.Summary),
+ QueryParams: qp,
+ PathParams: pp,
+ HeaderParams: hp,
+ FormParams: fp,
+ HasQueryParams: hasQueryParams,
+ HasPathParams: hasPathParams,
+ HasHeaderParams: hasHeaderParams,
+ HasFormParams: hasFormParams,
+ HasFormValueParams: hasFormValueParams,
+ HasFileParams: hasFileParams,
+ HasBodyParams: hasBodyParams,
+ HasStreamingResponse: hasStreamingResponse,
+ Authorized: b.Authed,
+ Security: b.makeSecurityRequirements(receiver), // resolved security requirements, for codegen
+ SecurityDefinitions: b.makeSecuritySchemes(receiver),
+ SecurityRequirements: securityRequirements(operation.Security), // raw security requirements, for doc
+ Principal: b.Principal,
+ Responses: responses,
+ DefaultResponse: defaultResponse,
+ SuccessResponse: successResponse,
+ SuccessResponses: successResponses,
+ ExtraSchemas: gatherExtraSchemas(b.ExtraSchemas),
+ Schemes: schemeOrDefault(schemes, b.DefaultScheme),
+ SchemeOverrides: originalSchemes, // raw operation schemes, for doc
+ ProducesMediaTypes: produces, // resolved produces, for codegen
+ ConsumesMediaTypes: consumes, // resolved consumes, for codegen
+ Produces: operation.Produces, // for doc
+ Consumes: operation.Consumes, // for doc
+ ExtraSchemes: extraSchemes, // resolved schemes, for codegen
+ ExtraSchemeOverrides: originalExtraSchemes, // raw operation extra schemes, for doc
+ TimeoutName: timeoutName,
+ Extensions: operation.Extensions,
+ StrictResponders: b.GenOpts.StrictResponders,
+
+ PrincipalIsNullable: b.GenOpts.PrincipalIsNullable(),
+ ExternalDocs: trimExternalDoc(operation.ExternalDocs),
+ }, nil
+}
+
+func producesOrDefault(produces []string, fallback []string, defaultProduces string) []string {
+ if len(produces) > 0 {
+ return produces
+ }
+ if len(fallback) > 0 {
+ return fallback
+ }
+ return []string{defaultProduces}
+}
+
+func schemeOrDefault(schemes []string, defaultScheme string) []string {
+ if len(schemes) == 0 {
+ return []string{defaultScheme}
+ }
+ return schemes
+}
+
+func (b *codeGenOpBuilder) MakeResponse(receiver, name string, isSuccess bool, resolver *typeResolver, code int, resp spec.Response) (GenResponse, error) {
+ debugLog("[%s %s] making id %q", b.Method, b.Path, b.Operation.ID)
+
+ // assume minimal flattening has been carried on, so there is not $ref in response (but some may remain in response schema)
+ examples := make(GenResponseExamples, 0, len(resp.Examples))
+ for k, v := range resp.Examples {
+ examples = append(examples, GenResponseExample{MediaType: k, Example: v})
+ }
+ sort.Sort(examples)
+
+ res := GenResponse{
+ Package: b.GenOpts.LanguageOpts.ManglePackageName(b.APIPackage, defaultOperationsTarget),
+ ModelsPackage: b.ModelsPackage,
+ ReceiverName: receiver,
+ Name: name,
+ Description: trimBOM(resp.Description),
+ DefaultImports: b.DefaultImports,
+ Imports: b.Imports,
+ IsSuccess: isSuccess,
+ Code: code,
+ Method: b.Method,
+ Path: b.Path,
+ Extensions: resp.Extensions,
+ StrictResponders: b.GenOpts.StrictResponders,
+ OperationName: b.Name,
+ Examples: examples,
+ }
+
+ // prepare response headers
+ for hName, header := range resp.Headers {
+ hdr, err := b.MakeHeader(receiver, hName, header)
+ if err != nil {
+ return GenResponse{}, err
+ }
+ res.Headers = append(res.Headers, hdr)
+ }
+ sort.Sort(res.Headers)
+
+ if resp.Schema != nil {
+ // resolve schema model
+ schema, ers := b.buildOperationSchema(fmt.Sprintf("%q", name), name+"Body", swag.ToGoName(name+"Body"), receiver, "i", resp.Schema, resolver)
+ if ers != nil {
+ return GenResponse{}, ers
+ }
+ res.Schema = &schema
+ }
+ return res, nil
+}
+
+func (b *codeGenOpBuilder) MakeHeader(receiver, name string, hdr spec.Header) (GenHeader, error) {
+ tpe := simpleResolvedType(hdr.Type, hdr.Format, hdr.Items, &hdr.CommonValidations)
+
+ id := swag.ToGoName(name)
+ res := GenHeader{
+ sharedValidations: sharedValidations{
+ Required: true,
+ SchemaValidations: hdr.Validations(), // NOTE: Required is not defined by the Swagger schema for header. Set arbitrarily to true for convenience in templates.
+ },
+ resolvedType: tpe,
+ Package: b.GenOpts.LanguageOpts.ManglePackageName(b.APIPackage, defaultOperationsTarget),
+ ReceiverName: receiver,
+ ID: id,
+ Name: name,
+ Path: fmt.Sprintf("%q", name),
+ ValueExpression: fmt.Sprintf("%s.%s", receiver, id),
+ Description: trimBOM(hdr.Description),
+ Default: hdr.Default,
+ HasDefault: hdr.Default != nil,
+ Converter: stringConverters[tpe.GoType],
+ Formatter: stringFormatters[tpe.GoType],
+ ZeroValue: tpe.Zero(),
+ CollectionFormat: hdr.CollectionFormat,
+ IndexVar: "i",
+ }
+ res.HasValidations, res.HasSliceValidations = b.HasValidations(hdr.CommonValidations, res.resolvedType)
+
+ hasChildValidations := false
+ if hdr.Items != nil {
+ pi, err := b.MakeHeaderItem(receiver, name+" "+res.IndexVar, res.IndexVar+"i", "fmt.Sprintf(\"%s.%v\", \"header\", "+res.IndexVar+")", res.Name+"I", hdr.Items, nil)
+ if err != nil {
+ return GenHeader{}, err
+ }
+ res.Child = &pi
+ hasChildValidations = pi.HasValidations
+ }
+ // we feed the GenHeader structure the same way as we do for
+ // GenParameter, even though there is currently no actual validation
+ // for response headers.
+ res.HasValidations = res.HasValidations || hasChildValidations
+
+ return res, nil
+}
+
+func (b *codeGenOpBuilder) MakeHeaderItem(receiver, paramName, indexVar, path, valueExpression string, items, _ *spec.Items) (GenItems, error) {
+ var res GenItems
+ res.resolvedType = simpleResolvedType(items.Type, items.Format, items.Items, &items.CommonValidations)
+
+ res.sharedValidations = sharedValidations{
+ Required: false,
+ SchemaValidations: items.Validations(),
+ }
+ res.Name = paramName
+ res.Path = path
+ res.Location = "header"
+ res.ValueExpression = swag.ToVarName(valueExpression)
+ res.CollectionFormat = items.CollectionFormat
+ res.Converter = stringConverters[res.GoType]
+ res.Formatter = stringFormatters[res.GoType]
+ res.IndexVar = indexVar
+ res.HasValidations, res.HasSliceValidations = b.HasValidations(items.CommonValidations, res.resolvedType)
+ res.IsEnumCI = b.GenOpts.AllowEnumCI || hasEnumCI(items.Extensions)
+
+ if items.Items != nil {
+ // Recursively follows nested arrays
+ // IMPORTANT! transmitting a ValueExpression consistent with the parent's one
+ hi, err := b.MakeHeaderItem(receiver, paramName+" "+indexVar, indexVar+"i", "fmt.Sprintf(\"%s.%v\", \"header\", "+indexVar+")", res.ValueExpression+"I", items.Items, items)
+ if err != nil {
+ return GenItems{}, err
+ }
+ res.Child = &hi
+ hi.Parent = &res
+ // Propagates HasValidations flag to outer Items definition (currently not in use: done to remain consistent with parameters)
+ res.HasValidations = res.HasValidations || hi.HasValidations
+ }
+
+ return res, nil
+}
+
+// HasValidations resolves the validation status for simple schema objects
+func (b *codeGenOpBuilder) HasValidations(sh spec.CommonValidations, rt resolvedType) (hasValidations bool, hasSliceValidations bool) {
+ hasSliceValidations = sh.HasArrayValidations() || sh.HasEnum()
+ hasValidations = sh.HasNumberValidations() || sh.HasStringValidations() || hasSliceValidations || hasFormatValidation(rt)
+ return
+}
+
+func (b *codeGenOpBuilder) MakeParameterItem(receiver, paramName, indexVar, path, valueExpression, location string, resolver *typeResolver, items, _ *spec.Items) (GenItems, error) {
+ debugLog("making parameter item recv=%s param=%s index=%s valueExpr=%s path=%s location=%s", receiver, paramName, indexVar, valueExpression, path, location)
+ var res GenItems
+ res.resolvedType = simpleResolvedType(items.Type, items.Format, items.Items, &items.CommonValidations)
+
+ res.sharedValidations = sharedValidations{
+ Required: false,
+ SchemaValidations: items.Validations(),
+ }
+ res.Name = paramName
+ res.Path = path
+ res.Location = location
+ res.ValueExpression = swag.ToVarName(valueExpression)
+ res.CollectionFormat = items.CollectionFormat
+ res.Converter = stringConverters[res.GoType]
+ res.Formatter = stringFormatters[res.GoType]
+ res.IndexVar = indexVar
+
+ res.HasValidations, res.HasSliceValidations = b.HasValidations(items.CommonValidations, res.resolvedType)
+ res.IsEnumCI = b.GenOpts.AllowEnumCI || hasEnumCI(items.Extensions)
+ res.NeedsIndex = res.HasValidations || res.Converter != "" || (res.IsCustomFormatter && !res.SkipParse)
+
+ if items.Items != nil {
+ // Recursively follows nested arrays
+ // IMPORTANT! transmitting a ValueExpression consistent with the parent's one
+ pi, err := b.MakeParameterItem(receiver, paramName+" "+indexVar, indexVar+"i", "fmt.Sprintf(\"%s.%v\", "+path+", "+indexVar+")", res.ValueExpression+"I", location, resolver, items.Items, items)
+ if err != nil {
+ return GenItems{}, err
+ }
+ res.Child = &pi
+ pi.Parent = &res
+ // Propagates HasValidations flag to outer Items definition
+ res.HasValidations = res.HasValidations || pi.HasValidations
+ res.NeedsIndex = res.NeedsIndex || pi.NeedsIndex
+ }
+
+ return res, nil
+}
+
+func (b *codeGenOpBuilder) MakeParameter(receiver string, resolver *typeResolver, param spec.Parameter, idMapping map[string]map[string]string) (GenParameter, error) {
+ debugLog("[%s %s] making parameter %q", b.Method, b.Path, param.Name)
+
+ // assume minimal flattening has been carried on, so there is not $ref in response (but some may remain in response schema)
+
+ var child *GenItems
+ id := swag.ToGoName(param.Name)
+ if goName, ok := param.Extensions["x-go-name"]; ok {
+ id, ok = goName.(string)
+ if !ok {
+ return GenParameter{}, fmt.Errorf(`%s %s, parameter %q: "x-go-name" field must be a string, not a %T`,
+ b.Method, b.Path, param.Name, goName)
+ }
+ } else if len(idMapping) > 0 {
+ id = idMapping[param.In][param.Name]
+ }
+
+ res := GenParameter{
+ ID: id,
+ Name: param.Name,
+ ModelsPackage: b.ModelsPackage,
+ Path: fmt.Sprintf("%q", param.Name),
+ ValueExpression: fmt.Sprintf("%s.%s", receiver, id),
+ IndexVar: "i",
+ Default: param.Default,
+ HasDefault: param.Default != nil,
+ Description: trimBOM(param.Description),
+ ReceiverName: receiver,
+ CollectionFormat: param.CollectionFormat,
+ Child: child,
+ Location: param.In,
+ AllowEmptyValue: (param.In == "query" || param.In == "formData") && param.AllowEmptyValue,
+ Extensions: param.Extensions,
+ }
+
+ if param.In == "body" {
+ // Process parameters declared in body (i.e. have a Schema)
+ res.Required = param.Required
+ if err := b.MakeBodyParameter(&res, resolver, param.Schema); err != nil {
+ return GenParameter{}, err
+ }
+ } else {
+ // Process parameters declared in other inputs: path, query, header (SimpleSchema)
+ res.resolvedType = simpleResolvedType(param.Type, param.Format, param.Items, &param.CommonValidations)
+ res.sharedValidations = sharedValidations{
+ Required: param.Required,
+ SchemaValidations: param.Validations(),
+ }
+
+ res.ZeroValue = res.resolvedType.Zero()
+
+ hasChildValidations := false
+ if param.Items != nil {
+ // Follow Items definition for array parameters
+ pi, err := b.MakeParameterItem(receiver, param.Name+" "+res.IndexVar, res.IndexVar+"i", "fmt.Sprintf(\"%s.%v\", "+res.Path+", "+res.IndexVar+")", res.Name+"I", param.In, resolver, param.Items, nil)
+ if err != nil {
+ return GenParameter{}, err
+ }
+ res.Child = &pi
+ // Propagates HasValidations from from child array
+ hasChildValidations = pi.HasValidations
+ }
+ res.IsNullable = !param.Required && !param.AllowEmptyValue
+ res.HasValidations, res.HasSliceValidations = b.HasValidations(param.CommonValidations, res.resolvedType)
+ res.HasValidations = res.HasValidations || hasChildValidations
+ res.IsEnumCI = b.GenOpts.AllowEnumCI || hasEnumCI(param.Extensions)
+ }
+
+ // Select codegen strategy for body param validation
+ res.Converter = stringConverters[res.GoType]
+ res.Formatter = stringFormatters[res.GoType]
+ b.setBodyParamValidation(&res)
+
+ return res, nil
+}
+
+// MakeBodyParameter constructs a body parameter schema
+func (b *codeGenOpBuilder) MakeBodyParameter(res *GenParameter, resolver *typeResolver, sch *spec.Schema) error {
+ // resolve schema model
+ schema, ers := b.buildOperationSchema(res.Path, b.Operation.ID+"ParamsBody", swag.ToGoName(b.Operation.ID+" Body"), res.ReceiverName, res.IndexVar, sch, resolver)
+ if ers != nil {
+ return ers
+ }
+ res.Schema = &schema
+ res.Schema.Required = res.Required // Required in body is managed independently from validations
+
+ // build Child items for nested slices and maps
+ var items *GenItems
+ res.KeyVar = "k"
+ res.Schema.KeyVar = "k"
+ switch {
+ case schema.IsMap && !schema.IsInterface:
+ items = b.MakeBodyParameterItemsAndMaps(res, res.Schema.AdditionalProperties)
+ case schema.IsArray:
+ items = b.MakeBodyParameterItemsAndMaps(res, res.Schema.Items)
+ default:
+ items = new(GenItems)
+ }
+
+ // templates assume at least one .Child != nil
+ res.Child = items
+ schema.HasValidations = schema.HasValidations || items.HasValidations
+
+ res.resolvedType = schema.resolvedType
+
+ // simple and schema views share the same validations
+ res.sharedValidations = schema.sharedValidations
+ res.ZeroValue = schema.Zero()
+ return nil
+}
+
// MakeBodyParameterItemsAndMaps clones the .Items schema structure (resp. .AdditionalProperties) as a .GenItems structure
// for compatibility with simple param templates.
//
// Constructed children assume simple structures: any complex object is assumed to be resolved by a model or extra schema definition
func (b *codeGenOpBuilder) MakeBodyParameterItemsAndMaps(res *GenParameter, it *GenSchema) *GenItems {
	items := new(GenItems)
	if it != nil {
		var prev *GenItems
		next := items
		// seed the root child: its error path is indexed by the parent's index var (arrays)
		// or key var (maps)
		if res.Schema.IsArray {
			next.Path = "fmt.Sprintf(\"%s.%v\", " + res.Path + ", " + res.IndexVar + ")"
		} else if res.Schema.IsMap {
			next.Path = "fmt.Sprintf(\"%s.%v\", " + res.Path + ", " + res.KeyVar + ")"
		}
		next.Name = res.Name + " " + res.Schema.IndexVar
		next.IndexVar = res.Schema.IndexVar + "i"
		next.KeyVar = res.Schema.KeyVar + "k"
		next.ValueExpression = swag.ToVarName(res.Name + "I")
		next.Location = "body"
		// walk down the nested Items / AdditionalProperties chain, cloning each level
		// into a linked GenItems node
		for it != nil {
			next.resolvedType = it.resolvedType
			next.sharedValidations = it.sharedValidations
			next.Formatter = stringFormatters[it.SwaggerFormat]
			next.Converter = stringConverters[res.GoType]
			next.Parent = prev
			_, next.IsCustomFormatter = customFormatters[it.GoType]
			next.IsCustomFormatter = next.IsCustomFormatter && !it.IsStream

			// special instruction to avoid using CollectionFormat for body params
			next.SkipParse = true

			if prev != nil {
				// non-root levels derive their error path and loop variables from the parent level
				if prev.IsArray {
					next.Path = "fmt.Sprintf(\"%s.%v\", " + prev.Path + ", " + prev.IndexVar + ")"
				} else if prev.IsMap {
					next.Path = "fmt.Sprintf(\"%s.%v\", " + prev.Path + ", " + prev.KeyVar + ")"
				}
				next.Name = prev.Name + prev.IndexVar
				next.IndexVar = prev.IndexVar + "i"
				next.KeyVar = prev.KeyVar + "k"
				next.ValueExpression = swag.ToVarName(prev.ValueExpression + "I")
				prev.Child = next
			}

			// found a complex or aliased thing
			// hide details from the aliased type and stop recursing
			if next.IsAliased || next.IsComplexObject {
				next.IsArray = false
				next.IsMap = false
				next.IsCustomFormatter = false
				next.IsComplexObject = true
				next.IsAliased = true
				break
			}
			// interfaces, streams and base64 blobs carry no validations of their own
			if next.IsInterface || next.IsStream || next.IsBase64 {
				next.HasValidations = false
			}
			next.NeedsIndex = next.HasValidations || next.Converter != "" || (next.IsCustomFormatter && !next.SkipParse)
			prev = next
			next = new(GenItems)

			switch {
			case it.Items != nil:
				it = it.Items
			case it.AdditionalProperties != nil:
				it = it.AdditionalProperties
			default:
				it = nil
			}
		}
		// propagate HasValidations: any validated descendant forces validation
		// (and index tracking) on all of its ancestors
		var propag func(child *GenItems) (bool, bool)
		propag = func(child *GenItems) (bool, bool) {
			if child == nil {
				return false, false
			}
			cValidations, cIndex := propag(child.Child)
			child.HasValidations = child.HasValidations || cValidations
			child.NeedsIndex = child.HasValidations || child.Converter != "" || (child.IsCustomFormatter && !child.SkipParse) || cIndex
			return child.HasValidations, child.NeedsIndex
		}
		items.HasValidations, items.NeedsIndex = propag(items)

		// resolve nullability conflicts when declaring body as a map of array of an anonymous complex object
		// (e.g. refer to an extra schema type, which is nullable, but not rendered as a pointer in arrays or maps)
		// Rule: outer type rules (with IsMapNullOverride), inner types are fixed
		var fixNullable func(child *GenItems) string
		fixNullable = func(child *GenItems) string {
			if !child.IsArray && !child.IsMap {
				if child.IsComplexObject {
					return child.GoType
				}
				return ""
			}
			if innerType := fixNullable(child.Child); innerType != "" {
				if child.IsMapNullOverride && child.IsArray {
					child.GoType = "[]" + innerType
					return child.GoType
				}
			}
			return ""
		}
		fixNullable(items)
	}
	return items
}
+
// setBodyParamValidation selects the codegen validation strategy for a body
// parameter and records it as HasSimpleBody*/HasModelBody* flags on p.
// It is a no-op for non-body parameters.
func (b *codeGenOpBuilder) setBodyParamValidation(p *GenParameter) {
	// Determine validation strategy for body param.
	//
	// Here are the distinct strategies:
	// - the body parameter is a model object => delegates
	// - the body parameter is an array of model objects => carry on slice validations, then iterate and delegate
	// - the body parameter is a map of model objects => iterate and delegate
	// - the body parameter is an array of simple objects (including maps)
	// - the body parameter is a map of simple objects (including arrays)
	if p.IsBodyParam() {
		var hasSimpleBodyParams, hasSimpleBodyItems, hasSimpleBodyMap, hasModelBodyParams, hasModelBodyItems, hasModelBodyMap bool
		s := p.Schema
		if s != nil {
			// interfaces, streams and base64 blobs never get validated
			doNot := s.IsInterface || s.IsStream || s.IsBase64
			// composition of primitive fields must be properly identified: hack this through
			_, isPrimitive := primitives[s.GoType]
			_, isFormatter := customFormatters[s.GoType]
			isComposedPrimitive := s.IsPrimitive && !(isPrimitive || isFormatter)

			hasSimpleBodyParams = !s.IsComplexObject && !s.IsAliased && !isComposedPrimitive && !doNot
			hasModelBodyParams = (s.IsComplexObject || s.IsAliased || isComposedPrimitive) && !doNot

			if s.IsArray && s.Items != nil {
				it := s.Items
				doNot = it.IsInterface || it.IsStream || it.IsBase64
				hasSimpleBodyItems = !it.IsComplexObject && !(it.IsAliased || doNot)
				hasModelBodyItems = (it.IsComplexObject || it.IsAliased) && !doNot
			}
			if s.IsMap && s.AdditionalProperties != nil {
				// NOTE(review): doNot is not recomputed for the map element: it still holds
				// the value derived from the schema itself (or, if the schema is also an
				// array, from its items) — confirm this is intended upstream
				it := s.AdditionalProperties
				hasSimpleBodyMap = !it.IsComplexObject && !(it.IsAliased || doNot)
				hasModelBodyMap = !hasSimpleBodyMap && !doNot
			}
		}
		// set validation strategy for body param
		p.HasSimpleBodyParams = hasSimpleBodyParams
		p.HasSimpleBodyItems = hasSimpleBodyItems
		p.HasModelBodyParams = hasModelBodyParams
		p.HasModelBodyItems = hasModelBodyItems
		p.HasModelBodyMap = hasModelBodyMap
		p.HasSimpleBodyMap = hasSimpleBodyMap
	}

}
+
// makeSecuritySchemes produces a sorted list of security schemes for this operation.
// It delegates to the shared gatherSecuritySchemes helper, passing the operation's
// security definitions and the resolved principal type.
func (b *codeGenOpBuilder) makeSecuritySchemes(receiver string) GenSecuritySchemes {
	return gatherSecuritySchemes(b.SecurityDefinitions, b.Name, b.Principal, receiver, b.GenOpts.PrincipalIsNullable())
}
+
+// makeSecurityRequirements produces a sorted list of security requirements for this operation.
+// As for current, these requirements are not used by codegen (sec. requirement is determined at runtime).
+// We keep the order of the slice from the original spec, but sort the inner slice which comes from a map,
+// as well as the map of scopes.
+func (b *codeGenOpBuilder) makeSecurityRequirements(_ string) []GenSecurityRequirements {
+ if b.Security == nil {
+ // nil (default requirement) is different than [] (no requirement)
+ return nil
+ }
+
+ securityRequirements := make([]GenSecurityRequirements, 0, len(b.Security))
+ for _, req := range b.Security {
+ jointReq := make(GenSecurityRequirements, 0, len(req))
+ for _, j := range req {
+ scopes := j.Scopes
+ sort.Strings(scopes)
+ jointReq = append(jointReq, GenSecurityRequirement{
+ Name: j.Name,
+ Scopes: scopes,
+ })
+ }
+ // sort joint requirements (come from a map in spec)
+ sort.Sort(jointReq)
+ securityRequirements = append(securityRequirements, jointReq)
+ }
+ return securityRequirements
+}
+
+// cloneSchema returns a deep copy of a schema
+func (b *codeGenOpBuilder) cloneSchema(schema *spec.Schema) *spec.Schema {
+ savedSchema := &spec.Schema{}
+ schemaRep, _ := json.Marshal(schema)
+ _ = json.Unmarshal(schemaRep, savedSchema)
+ return savedSchema
+}
+
// saveResolveContext keeps a copy of known definitions and schema to properly roll back on a makeGenSchema() call
// This uses a deep clone the spec document to construct a type resolver which knows about definitions when the making of this operation started,
// and only these definitions. We are not interested in the "original spec", but in the already transformed spec.
func (b *codeGenOpBuilder) saveResolveContext(resolver *typeResolver, schema *spec.Schema) (*typeResolver, *spec.Schema) {
	// lazily snapshot the document: the pristine copy is shared by all
	// save points taken on this builder
	if b.PristineDoc == nil {
		b.PristineDoc = b.Doc.Pristine()
	}
	rslv := newTypeResolver(b.GenOpts.LanguageOpts.ManglePackageName(resolver.ModelsPackage, defaultModelsTarget), b.DefaultImports[b.ModelsPackage], b.PristineDoc)

	return rslv, b.cloneSchema(schema)
}
+
// liftExtraSchemas constructs the schema for an anonymous construct with some ExtraSchemas.
//
// When some ExtraSchemas are produced from something else than a definition,
// this indicates we are not running in fully flattened mode and we need to render
// these ExtraSchemas in the operation's package.
// We need to rebuild the schema with a new type resolver to reflect this change in the
// models package.
func (b *codeGenOpBuilder) liftExtraSchemas(resolver, rslv *typeResolver, bs *spec.Schema, sc *schemaGenContext) (schema *GenSchema, err error) {
	// restore resolving state before previous call to makeGenSchema()
	sc.Schema = *bs

	pg := sc.shallowClone()
	pkg := b.GenOpts.LanguageOpts.ManglePackageName(resolver.ModelsPackage, defaultModelsTarget)

	// make a resolver for current package (i.e. operations)
	pg.TypeResolver = newTypeResolver("", b.DefaultImports[b.APIPackage], rslv.Doc).
		withKeepDefinitionsPackage(pkg).
		withDefinitionPackage(b.APIPackageAlias) // all new extra schemas are going to be in api pkg
	pg.ExtraSchemas = make(map[string]GenSchema, len(sc.ExtraSchemas))
	pg.UseContainerInName = true

	// rebuild schema within local package
	if err = pg.makeGenSchema(); err != nil {
		return
	}

	// lift nested extra schemas (inlined types)
	if b.ExtraSchemas == nil {
		b.ExtraSchemas = make(map[string]GenSchema, len(pg.ExtraSchemas))
	}
	for _, v := range pg.ExtraSchemas {
		vv := v
		// stream schemas are not lifted as extra schemas
		if !v.IsStream {
			b.ExtraSchemas[vv.Name] = vv
		}
	}
	schema = &pg.GenSchema
	return
}
+
// buildOperationSchema constructs a schema for an operation (for body params or responses).
// It determines if the schema is readily available from the models package,
// or if a schema has to be generated in the operations package (i.e. is anonymous).
// Whenever an anonymous schema needs some extra schemas, we also determine if these extras are
// available from models or must be generated alongside the schema in the operations package.
//
// Duplicate extra schemas are pruned later on, when operations grouping in packages (e.g. from tags) takes place.
func (b *codeGenOpBuilder) buildOperationSchema(schemaPath, containerName, schemaName, receiverName, indexVar string, sch *spec.Schema, resolver *typeResolver) (GenSchema, error) {
	var schema GenSchema

	if sch == nil {
		sch = &spec.Schema{}
	}
	// work on a shallow copy of the resolver, pointed at the models import
	shallowClonedResolver := *resolver
	shallowClonedResolver.ModelsFullPkg = b.DefaultImports[b.ModelsPackage]
	rslv := &shallowClonedResolver

	sc := schemaGenContext{
		Path:                       schemaPath,
		Name:                       containerName,
		Receiver:                   receiverName,
		ValueExpr:                  receiverName,
		IndexVar:                   indexVar,
		Schema:                     *sch,
		Required:                   false,
		TypeResolver:               rslv,
		Named:                      false,
		IncludeModel:               true,
		IncludeValidator:           b.GenOpts.IncludeValidator,
		StrictAdditionalProperties: b.GenOpts.StrictAdditionalProperties,
		ExtraSchemas:               make(map[string]GenSchema),
		StructTags:                 b.GenOpts.StructTags,
	}

	var (
		br *typeResolver
		bs *spec.Schema
	)

	if sch.Ref.String() == "" {
		// backup the type resolver context
		// (not needed when the schema has a name)
		br, bs = b.saveResolveContext(rslv, sch)
	}

	if err := sc.makeGenSchema(); err != nil {
		return GenSchema{}, err
	}
	// collect the imports required by the generated schema
	for alias, pkg := range findImports(&sc.GenSchema) {
		b.Imports[alias] = pkg
	}

	if sch.Ref.String() == "" && len(sc.ExtraSchemas) > 0 {
		// anonymous schema that produced extra schemas: rebuild it in the operations package
		newSchema, err := b.liftExtraSchemas(resolver, br, bs, &sc)
		if err != nil {
			return GenSchema{}, err
		}
		if newSchema != nil {
			schema = *newSchema
		}
	} else {
		schema = sc.GenSchema
	}

	// new schemas will be in api pkg
	schemaPkg := b.GenOpts.LanguageOpts.ManglePackageName(b.APIPackage, "")
	schema.Pkg = schemaPkg

	if schema.IsAnonymous {
		// a generated name for anonymous schema
		// TODO: support x-go-name
		hasProperties := len(schema.Properties) > 0
		isAllOf := len(schema.AllOf) > 0
		isInterface := schema.IsInterface
		hasValidations := schema.HasValidations

		// for complex anonymous objects, produce an extra schema
		if hasProperties || isAllOf {
			if b.ExtraSchemas == nil {
				b.ExtraSchemas = make(map[string]GenSchema)
			}
			schema.Name = schemaName
			schema.GoType = schemaName
			schema.IsAnonymous = false
			b.ExtraSchemas[schemaName] = schema

			// constructs new schema to refer to the newly created type
			schema = GenSchema{}
			schema.IsAnonymous = false
			schema.IsComplexObject = true
			schema.SwaggerType = schemaName
			schema.HasValidations = hasValidations
			schema.GoType = schemaName
			schema.Pkg = schemaPkg
		} else if isInterface {
			// anonymous interface payload: no validation, no extra schema
			schema = GenSchema{}
			schema.IsAnonymous = false
			schema.IsComplexObject = false
			schema.IsInterface = true
			schema.HasValidations = false
			schema.GoType = iface
		}
	}

	return schema, nil
}
+
+func intersectTags(left, right []string) []string {
+ // dedupe
+ uniqueTags := make(map[string]struct{}, maxInt(len(left), len(right)))
+ for _, l := range left {
+ if len(right) == 0 || swag.ContainsStrings(right, l) {
+ uniqueTags[l] = struct{}{}
+ }
+ }
+ filtered := make([]string, 0, len(uniqueTags))
+ // stable output across generations, preserving original order
+ for _, k := range left {
+ if _, ok := uniqueTags[k]; !ok {
+ continue
+ }
+ filtered = append(filtered, k)
+ delete(uniqueTags, k)
+ }
+ return filtered
+}
+
// analyzeTags analyzes tags for an operation: it returns the tag selected for package
// grouping (possibly overridden by the x-go-operation-tag extension), the list of
// operation tags retained by the generation filter, and whether the operation itself
// is retained by that filter. It also sets b.APIPackage and b.APIPackageAlias.
func (b *codeGenOpBuilder) analyzeTags() (string, []string, bool) {
	var (
		filter         []string
		tag            string
		hasTagOverride bool
	)
	if b.GenOpts != nil {
		filter = b.GenOpts.Tags
	}
	intersected := intersectTags(pruneEmpty(b.Operation.Tags), filter)
	if !b.GenOpts.SkipTagPackages && len(intersected) > 0 {
		// override generation with: x-go-operation-tag
		tag, hasTagOverride = b.Operation.Extensions.GetString(xGoOperationTag)
		if !hasTagOverride {
			// TODO(fred): this part should be delegated to some new TagsFor(operation) in go-openapi/analysis
			tag = intersected[0]
			gtags := b.Doc.Spec().Tags
			// look up the global tag declaration matching the selected tag
			for _, gtag := range gtags {
				if gtag.Name != tag {
					continue
				}
				// honor x-go-name in tag
				if name, hasGoName := gtag.Extensions.GetString(xGoName); hasGoName {
					tag = name
					break
				}
				// honor x-go-operation-tag in tag
				if name, hasOpName := gtag.Extensions.GetString(xGoOperationTag); hasOpName {
					tag = name
					break
				}
			}
		}
	}
	if tag == b.APIPackage {
		// conflict with "operations" package is handled separately
		tag = renameOperationPackage(intersected, tag)
	}
	b.APIPackage = b.GenOpts.LanguageOpts.ManglePackageName(tag, b.APIPackage) // actual package name
	b.APIPackageAlias = deconflictTag(intersected, b.APIPackage)               // deconflicted import alias
	// retained when there is no filter, or when the filter matched at least one tag
	return tag, intersected, len(filter) == 0 || len(filter) > 0 && len(intersected) > 0
}
+
// maxInt returns the larger of two ints.
func maxInt(a, b int) int {
	if b > a {
		return b
	}
	return a
}
+
+// deconflictTag ensures generated packages for operations based on tags do not conflict
+// with other imports
+func deconflictTag(seenTags []string, pkg string) string {
+ return deconflictPkg(pkg, func(pkg string) string { return renameOperationPackage(seenTags, pkg) })
+}
+
+// deconflictPrincipal ensures that whenever an external principal package is added, it doesn't conflict
+// with standard imports
+func deconflictPrincipal(pkg string) string {
+ switch pkg {
+ case "principal":
+ return renamePrincipalPackage(pkg)
+ default:
+ return deconflictPkg(pkg, renamePrincipalPackage)
+ }
+}
+
// deconflictPkg renames package names which conflict with standard imports,
// go-openapi packages or generated variable names. Non-conflicting names are
// returned unchanged.
func deconflictPkg(pkg string, renamer func(string) string) string {
	switch pkg {
	case
		// package conflict with variables
		"api", "httptransport", "formats", "server",
		// package conflict with go-openapi imports
		"errors", "runtime", "middleware", "security", "spec", "strfmt", "loads", "swag", "validate",
		// package conflict with stdlib/other lib imports
		"tls", "http", "fmt", "strings", "log", "flags", "pflag", "json", "time":
		return renamer(pkg)
	default:
		return pkg
	}
}
+
+func renameOperationPackage(seenTags []string, pkg string) string {
+ current := strings.ToLower(pkg) + "ops"
+ if len(seenTags) == 0 {
+ return current
+ }
+ for swag.ContainsStringsCI(seenTags, current) {
+ current += "1"
+ }
+ return current
+}
+
// renamePrincipalPackage maps any conflicting principal package to the fixed
// alias "auth": favors readability over perfect deconfliction.
func renamePrincipalPackage(_ string) string {
	const alias = "auth"
	return alias
}
+
// renameServerPackage wraps a conflicting server package name as "swagger<pkg>srv":
// favors readability over perfect deconfliction.
func renameServerPackage(pkg string) string {
	return fmt.Sprintf("swagger%ssrv", pkg)
}
+
// renameAPIPackage prefixes a conflicting API package name with "swagger":
// favors readability over perfect deconfliction.
func renameAPIPackage(pkg string) string {
	const prefix = "swagger"
	return prefix + pkg
}
+
// renameImplementationPackage wraps a conflicting implementation package name as
// "swagger<pkg>impl": favors readability over perfect deconfliction.
func renameImplementationPackage(pkg string) string {
	return fmt.Sprintf("swagger%simpl", pkg)
}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/shared.go b/vendor/github.com/go-swagger/go-swagger/generator/shared.go
new file mode 100644
index 000000000..5e2c2cee2
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/shared.go
@@ -0,0 +1,1096 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "bytes"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log"
+ "os"
+ "path"
+ "path/filepath"
+ "reflect"
+ "sort"
+ "strings"
+ "text/template"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
const (
	// default generation targets structure
	defaultModelsTarget         = "models"         // package for generated models
	defaultServerTarget         = "restapi"        // package for generated server code
	defaultClientTarget         = "client"         // package for generated client code
	defaultOperationsTarget     = "operations"     // package for generated operations
	defaultClientName           = "rest"           // default name for a generated client
	defaultServerName           = "swagger"        // default name for a generated server
	defaultScheme               = "http"           // default transport scheme
	defaultImplementationTarget = "implementation" // package for user-provided implementations
)
+
// init carries out all package-level initializations for the generator package:
// debug switches, language options, the template repository and type mappings.
func init() {
	// all initializations for the generator package
	debugOptions()
	initLanguage()
	initTemplateRepo()
	initTypes()
}
+
// DefaultSectionOpts for a given opts, this is used when no config file is passed
// and uses the embedded templates when no local override can be found.
//
// Each empty section (Models, Operations, OperationGroups, Application) is filled
// with the default template set for either client or server generation; sections
// already populated by the caller are left untouched.
func DefaultSectionOpts(gen *GenOpts) {
	sec := gen.Sections
	// models section
	if len(sec.Models) == 0 {
		opts := []TemplateOpts{
			{
				Name:     "definition",
				Source:   "asset:model",
				Target:   "{{ joinFilePath .Target (toPackagePath .ModelPackage) }}",
				FileName: "{{ (snakize (pascalize .Name)) }}.go",
			},
		}
		if gen.IncludeCLi {
			opts = append(opts, TemplateOpts{
				Name:     "clidefinitionhook",
				Source:   "asset:cliModelcli",
				Target:   "{{ joinFilePath .Target (toPackagePath .CliPackage) }}",
				FileName: "{{ (snakize (pascalize .Name)) }}_model.go",
			})
		}
		sec.Models = opts
	}

	// per-operation section: client vs server template sets
	if len(sec.Operations) == 0 {
		if gen.IsClient {
			opts := []TemplateOpts{
				{
					Name:     "parameters",
					Source:   "asset:clientParameter",
					Target:   "{{ joinFilePath .Target (toPackagePath .ClientPackage) (toPackagePath .Package) }}",
					FileName: "{{ (snakize (pascalize .Name)) }}_parameters.go",
				},
				{
					Name:     "responses",
					Source:   "asset:clientResponse",
					Target:   "{{ joinFilePath .Target (toPackagePath .ClientPackage) (toPackagePath .Package) }}",
					FileName: "{{ (snakize (pascalize .Name)) }}_responses.go",
				},
			}
			if gen.IncludeCLi {
				opts = append(opts, TemplateOpts{
					Name:     "clioperation",
					Source:   "asset:cliOperation",
					Target:   "{{ joinFilePath .Target (toPackagePath .CliPackage) }}",
					FileName: "{{ (snakize (pascalize .Name)) }}_operation.go",
				})
			}
			sec.Operations = opts
		} else {
			// server: each artifact kind is individually toggled by GenOpts
			ops := []TemplateOpts{}
			if gen.IncludeParameters {
				ops = append(ops, TemplateOpts{
					Name:     "parameters",
					Source:   "asset:serverParameter",
					Target:   "{{ if .UseTags }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .APIPackage) (toPackagePath .Package) }}{{ else }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .Package) }}{{ end }}",
					FileName: "{{ (snakize (pascalize .Name)) }}_parameters.go",
				})
			}
			if gen.IncludeURLBuilder {
				ops = append(ops, TemplateOpts{
					Name:     "urlbuilder",
					Source:   "asset:serverUrlbuilder",
					Target:   "{{ if .UseTags }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .APIPackage) (toPackagePath .Package) }}{{ else }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .Package) }}{{ end }}",
					FileName: "{{ (snakize (pascalize .Name)) }}_urlbuilder.go",
				})
			}
			if gen.IncludeResponses {
				ops = append(ops, TemplateOpts{
					Name:     "responses",
					Source:   "asset:serverResponses",
					Target:   "{{ if .UseTags }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .APIPackage) (toPackagePath .Package) }}{{ else }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .Package) }}{{ end }}",
					FileName: "{{ (snakize (pascalize .Name)) }}_responses.go",
				})
			}
			if gen.IncludeHandler {
				ops = append(ops, TemplateOpts{
					Name:     "handler",
					Source:   "asset:serverOperation",
					Target:   "{{ if .UseTags }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .APIPackage) (toPackagePath .Package) }}{{ else }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .Package) }}{{ end }}",
					FileName: "{{ (snakize (pascalize .Name)) }}.go",
				})
			}
			sec.Operations = ops
		}
	}

	// operation-group section: only clients group operations into sub-clients
	if len(sec.OperationGroups) == 0 {
		if gen.IsClient {
			sec.OperationGroups = []TemplateOpts{
				{
					Name:     "client",
					Source:   "asset:clientClient",
					Target:   "{{ joinFilePath .Target (toPackagePath .ClientPackage) (toPackagePath .Name)}}",
					FileName: "{{ (snakize (pascalize .Name)) }}_client.go",
				},
			}
		} else {
			sec.OperationGroups = []TemplateOpts{}
		}
	}

	// application section: facade/CLI for clients, main/server/builder for servers
	if len(sec.Application) == 0 {
		if gen.IsClient {
			opts := []TemplateOpts{
				{
					Name:     "facade",
					Source:   "asset:clientFacade",
					Target:   "{{ joinFilePath .Target (toPackagePath .ClientPackage) }}",
					FileName: "{{ snakize .Name }}Client.go",
				},
			}
			if gen.IncludeCLi {
				// include a commandline tool app
				opts = append(opts, []TemplateOpts{{
					Name:     "commandline",
					Source:   "asset:cliCli",
					Target:   "{{ joinFilePath .Target (toPackagePath .CliPackage) }}",
					FileName: "cli.go",
				}, {
					Name:     "climain",
					Source:   "asset:cliMain",
					Target:   "{{ joinFilePath .Target \"cmd\" (toPackagePath .CliAppName) }}",
					FileName: "main.go",
				}, {
					Name:     "cliAutoComplete",
					Source:   "asset:cliCompletion",
					Target:   "{{ joinFilePath .Target (toPackagePath .CliPackage) }}",
					FileName: "autocomplete.go",
				}}...)
			}
			sec.Application = opts
		} else {
			opts := []TemplateOpts{
				{
					Name:     "main",
					Source:   "asset:serverMain",
					Target:   "{{ joinFilePath .Target \"cmd\" .MainPackage }}",
					FileName: "main.go",
				},
				{
					Name:     "embedded_spec",
					Source:   "asset:swaggerJsonEmbed",
					Target:   "{{ joinFilePath .Target (toPackagePath .ServerPackage) }}",
					FileName: "embedded_spec.go",
				},
				{
					Name:     "server",
					Source:   "asset:serverServer",
					Target:   "{{ joinFilePath .Target (toPackagePath .ServerPackage) }}",
					FileName: "server.go",
				},
				{
					Name:     "builder",
					Source:   "asset:serverBuilder",
					Target:   "{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .APIPackage) }}",
					FileName: "{{ snakize (pascalize .Name) }}_api.go",
				},
				{
					Name:     "doc",
					Source:   "asset:serverDoc",
					Target:   "{{ joinFilePath .Target (toPackagePath .ServerPackage) }}",
					FileName: "doc.go",
				},
			}
			if gen.ImplementationPackage != "" {
				// Use auto configure template
				opts = append(opts, TemplateOpts{
					Name:     "autoconfigure",
					Source:   "asset:serverAutoconfigureapi",
					Target:   "{{ joinFilePath .Target (toPackagePath .ServerPackage) }}",
					FileName: "auto_configure_{{ (snakize (pascalize .Name)) }}.go",
				})

			} else {
				// the configure file is only regenerated on demand
				opts = append(opts, TemplateOpts{
					Name:       "configure",
					Source:     "asset:serverConfigureapi",
					Target:     "{{ joinFilePath .Target (toPackagePath .ServerPackage) }}",
					FileName:   "configure_{{ (snakize (pascalize .Name)) }}.go",
					SkipExists: !gen.RegenerateConfigureAPI,
				})
			}
			sec.Application = opts
		}
	}
	gen.Sections = sec

}
+
+// MarkdownOpts for rendering a spec as markdown
+func MarkdownOpts() *LanguageOpts {
+ opts := &LanguageOpts{}
+ opts.Init()
+ return opts
+}
+
+// MarkdownSectionOpts for a given opts and output file.
+func MarkdownSectionOpts(gen *GenOpts, output string) {
+ gen.Sections.Models = nil
+ gen.Sections.OperationGroups = nil
+ gen.Sections.Operations = nil
+ gen.LanguageOpts = MarkdownOpts()
+ gen.Sections.Application = []TemplateOpts{
+ {
+ Name: "markdowndocs",
+ Source: "asset:markdownDocs",
+ Target: filepath.Dir(output),
+ FileName: filepath.Base(output),
+ },
+ }
+}
+
// TemplateOpts allows for codegen customization
type TemplateOpts struct {
	Name       string `mapstructure:"name"`        // section entry name
	Source     string `mapstructure:"source"`      // template source: "asset:<name>" or a file path
	Target     string `mapstructure:"target"`      // target directory (itself a template)
	FileName   string `mapstructure:"file_name"`   // generated file name (itself a template)
	SkipExists bool   `mapstructure:"skip_exists"` // do not overwrite an existing file
	SkipFormat bool   `mapstructure:"skip_format"` // skip gofmt on the generated output
}
+
// SectionOpts allows for specifying options to customize the templates used for generation
type SectionOpts struct {
	Application     []TemplateOpts `mapstructure:"application"`      // app-level artifacts (main, server, facade, ...)
	Operations      []TemplateOpts `mapstructure:"operations"`       // per-operation artifacts
	OperationGroups []TemplateOpts `mapstructure:"operation_groups"` // per-group (e.g. per-tag) artifacts
	Models          []TemplateOpts `mapstructure:"models"`           // per-model artifacts
}
+
// GenOptsCommon the options for the generator
type GenOptsCommon struct {
	// toggles selecting which artifacts are generated
	IncludeModel               bool
	IncludeValidator           bool
	IncludeHandler             bool
	IncludeParameters          bool
	IncludeResponses           bool
	IncludeURLBuilder          bool
	IncludeMain                bool
	IncludeSupport             bool
	IncludeCLi                 bool
	ExcludeSpec                bool
	DumpData                   bool
	ValidateSpec               bool
	FlattenOpts                *analysis.FlattenOpts
	IsClient                   bool
	defaultsEnsured            bool // set once EnsureDefaults has run
	PropertiesSpecOrder        bool
	StrictAdditionalProperties bool
	AllowTemplateOverride      bool

	// locations and package layout
	Spec                  string
	APIPackage            string
	ModelPackage          string
	ServerPackage         string
	ClientPackage         string
	CliPackage            string
	CliAppName            string // name of cli app. For example "dockerctl"
	ImplementationPackage string
	Principal             string
	PrincipalCustomIface  bool   // user-provided interface for Principal (non-nullable)
	Target                string // dir location where generated code is written to
	Sections              SectionOpts
	LanguageOpts          *LanguageOpts
	TypeMapping           map[string]string
	Imports               map[string]string
	DefaultScheme         string
	DefaultProduces       string
	DefaultConsumes       string
	WithXML               bool
	TemplateDir           string
	Template              string
	RegenerateConfigureAPI bool
	Operations            []string
	Models                []string
	Tags                  []string
	StructTags            []string
	Name                  string
	FlagStrategy          string
	CompatibilityMode     string
	ExistingModels        string
	Copyright             string
	SkipTagPackages       bool
	MainPackage           string
	IgnoreOperations      bool
	AllowEnumCI           bool
	StrictResponders      bool
	AcceptDefinitionsOnly bool

	templates *Repository // a shallow clone of the global template repository
}
+
+// CheckOpts carries out some global consistency checks on options.
+func (g *GenOpts) CheckOpts() error {
+ if g == nil {
+ return errors.New("gen opts are required")
+ }
+
+ if !filepath.IsAbs(g.Target) {
+ if _, err := filepath.Abs(g.Target); err != nil {
+ return fmt.Errorf("could not locate target %s: %v", g.Target, err)
+ }
+ }
+
+ if filepath.IsAbs(g.ServerPackage) {
+ return fmt.Errorf("you shouldn't specify an absolute path in --server-package: %s", g.ServerPackage)
+ }
+
+ if strings.HasPrefix(g.Spec, "http://") || strings.HasPrefix(g.Spec, "https://") {
+ return nil
+ }
+
+ pth, err := findSwaggerSpec(g.Spec)
+ if err != nil {
+ return err
+ }
+
+ // ensure spec path is absolute
+ g.Spec, err = filepath.Abs(pth)
+ if err != nil {
+ return fmt.Errorf("could not locate spec: %s", g.Spec)
+ }
+
+ return nil
+}
+
// TargetPath returns the target generation path relative to the server package.
// This method is used by templates, e.g. with {{ .TargetPath }}
//
// Errors cases are prevented by calling CheckOpts beforehand.
//
// Example:
// Target: ${PWD}/tmp
// ServerPackage: abc/efg
//
// Server is generated in ${PWD}/tmp/abc/efg
// relative TargetPath returned: ../../../tmp
func (g *GenOpts) TargetPath() string {
	var tgt string
	if g.Target == "" {
		tgt = "." // That's for windows
	} else {
		tgt = g.Target
	}
	// errors are deliberately ignored below: CheckOpts has already validated the paths
	tgtAbs, _ := filepath.Abs(tgt)
	srvPkg := filepath.FromSlash(g.LanguageOpts.ManglePackagePath(g.ServerPackage, "server"))
	srvrAbs := filepath.Join(tgtAbs, srvPkg)
	// relative path from the server dir back up to the target's parent, then down to target
	tgtRel, _ := filepath.Rel(srvrAbs, filepath.Dir(tgtAbs))
	tgtRel = filepath.Join(tgtRel, filepath.Base(tgtAbs))
	return tgtRel
}
+
// SpecPath returns the path to the spec relative to the server package.
// If the spec is remote keep this absolute location.
//
// If spec is not relative to server (e.g. lives on a different drive on windows),
// then the resolved path is absolute.
//
// This method is used by templates, e.g. with {{ .SpecPath }}
//
// Errors cases are prevented by calling CheckOpts beforehand.
func (g *GenOpts) SpecPath() string {
	if strings.HasPrefix(g.Spec, "http://") || strings.HasPrefix(g.Spec, "https://") {
		// remote spec: keep the URL as-is
		return g.Spec
	}
	// Local specifications
	specAbs, _ := filepath.Abs(g.Spec)
	var tgt string
	if g.Target == "" {
		tgt = "." // That's for windows
	} else {
		tgt = g.Target
	}
	tgtAbs, _ := filepath.Abs(tgt)
	srvPkg := filepath.FromSlash(g.LanguageOpts.ManglePackagePath(g.ServerPackage, "server"))
	srvAbs := filepath.Join(tgtAbs, srvPkg)
	specRel, err := filepath.Rel(srvAbs, specAbs)
	if err != nil {
		// no relative path exists (e.g. different drive on windows): fall back to absolute
		return specAbs
	}
	return specRel
}
+
+// PrincipalIsNullable indicates whether the principal type used for authentication
+// may be used as a pointer
+func (g *GenOpts) PrincipalIsNullable() bool {
+ debugLog("Principal: %s, %t, isnullable: %t", g.Principal, g.PrincipalCustomIface, g.Principal != iface && !g.PrincipalCustomIface)
+ return g.Principal != iface && !g.PrincipalCustomIface
+}
+
// EnsureDefaults for these gen opts: loads the template repository, language
// options, default sections, flatten options and default mimes/scheme.
// It is idempotent: subsequent calls are no-ops.
func (g *GenOpts) EnsureDefaults() error {
	if g.defaultsEnsured {
		return nil
	}

	// work on a shallow clone of the global template repository
	g.templates = templates.ShallowClone()

	g.templates.LoadDefaults()

	if g.LanguageOpts == nil {
		g.LanguageOpts = DefaultLanguageFunc()
	}

	DefaultSectionOpts(g)

	// set defaults for flattening options
	if g.FlattenOpts == nil {
		g.FlattenOpts = &analysis.FlattenOpts{
			Minimal:      true,
			Verbose:      true,
			RemoveUnused: false,
			Expand:       false,
		}
	}

	if g.DefaultScheme == "" {
		g.DefaultScheme = defaultScheme
	}

	if g.DefaultConsumes == "" {
		g.DefaultConsumes = runtime.JSONMime
	}

	if g.DefaultProduces == "" {
		g.DefaultProduces = runtime.JSONMime
	}

	// always include validator with models
	g.IncludeValidator = true

	if g.Principal == "" {
		// default principal is interface{} (never nullable)
		g.Principal = iface
		g.PrincipalCustomIface = false
	}

	g.defaultsEnsured = true
	return nil
}
+
// location resolves the target directory and file name for a template, by
// executing the template's Target and FileName expressions against a context
// built from the data being generated.
//
// The data's Name, Package, Tags and UseTags fields, when present, are
// extracted by reflection and made available to those expressions.
func (g *GenOpts) location(t *TemplateOpts, data interface{}) (string, string, error) {
	v := reflect.Indirect(reflect.ValueOf(data))
	fld := v.FieldByName("Name")
	var name string
	if fld.IsValid() {
		log.Println("name field", fld.String())
		name = fld.String()
	}

	// the package defaults to the API package unless overridden by the data
	fldpack := v.FieldByName("Package")
	pkg := g.APIPackage
	if fldpack.IsValid() {
		log.Println("package field", fldpack.String())
		pkg = fldpack.String()
	}

	var tags []string
	tagsF := v.FieldByName("Tags")
	if tagsF.IsValid() {
		if tt, ok := tagsF.Interface().([]string); ok {
			tags = tt
		}
	}

	var useTags bool
	useTagsF := v.FieldByName("UseTags")
	if useTagsF.IsValid() {
		useTags = useTagsF.Interface().(bool)
	}

	funcMap := FuncMapFunc(g.LanguageOpts)

	// Target and FileName are themselves Go templates
	pthTpl, err := template.New(t.Name + "-target").Funcs(funcMap).Parse(t.Target)
	if err != nil {
		return "", "", err
	}

	fNameTpl, err := template.New(t.Name + "-filename").Funcs(funcMap).Parse(t.FileName)
	if err != nil {
		return "", "", err
	}

	d := struct {
		Name, CliAppName, Package, APIPackage, ServerPackage, ClientPackage, CliPackage, ModelPackage, MainPackage, Target string
		Tags                                                                                                              []string
		UseTags                                                                                                           bool
		Context                                                                                                           interface{}
	}{
		Name:          name,
		CliAppName:    g.CliAppName,
		Package:       pkg,
		APIPackage:    g.APIPackage,
		ServerPackage: g.ServerPackage,
		ClientPackage: g.ClientPackage,
		CliPackage:    g.CliPackage,
		ModelPackage:  g.ModelPackage,
		MainPackage:   g.MainPackage,
		Target:        g.Target,
		Tags:          tags,
		UseTags:       useTags,
		Context:       data,
	}

	var pthBuf bytes.Buffer
	if e := pthTpl.Execute(&pthBuf, d); e != nil {
		return "", "", e
	}

	var fNameBuf bytes.Buffer
	if e := fNameTpl.Execute(&fNameBuf, d); e != nil {
		return "", "", e
	}
	// the file name is normalized to the language's file naming convention
	return pthBuf.String(), fileName(fNameBuf.String()), nil
}
+
+func (g *GenOpts) render(t *TemplateOpts, data interface{}) ([]byte, error) {
+ var templ *template.Template
+
+ if strings.HasPrefix(strings.ToLower(t.Source), "asset:") {
+ tt, err := g.templates.Get(strings.TrimPrefix(t.Source, "asset:"))
+ if err != nil {
+ return nil, err
+ }
+ templ = tt
+ }
+
+ if templ == nil {
+ // try to load from repository (and enable dependencies)
+ name := swag.ToJSONName(strings.TrimSuffix(t.Source, ".gotmpl"))
+ tt, err := g.templates.Get(name)
+ if err == nil {
+ templ = tt
+ }
+ }
+
+ if templ == nil {
+ // try to load template from disk, in TemplateDir if specified
+ // (dependencies resolution is limited to preloaded assets)
+ var templateFile string
+ if g.TemplateDir != "" {
+ templateFile = filepath.Join(g.TemplateDir, t.Source)
+ } else {
+ templateFile = t.Source
+ }
+ content, err := os.ReadFile(templateFile)
+ if err != nil {
+ return nil, fmt.Errorf("error while opening %s template file: %v", templateFile, err)
+ }
+ tt, err := template.New(t.Source).Funcs(FuncMapFunc(g.LanguageOpts)).Parse(string(content))
+ if err != nil {
+ return nil, fmt.Errorf("template parsing failed on template %s: %v", t.Name, err)
+ }
+ templ = tt
+ }
+
+ if templ == nil {
+ return nil, fmt.Errorf("template %q not found", t.Source)
+ }
+
+ var tBuf bytes.Buffer
+ if err := templ.Execute(&tBuf, data); err != nil {
+ return nil, fmt.Errorf("template execution failed for template %s: %v", t.Name, err)
+ }
+ log.Printf("executed template %s", t.Source)
+
+ return tBuf.Bytes(), nil
+}
+
+// Render template and write generated source code
+// generated code is reformatted ("linted"), which gives an
+// additional level of checking. If this step fails, the generated
+// code is still dumped, for template debugging purposes.
+func (g *GenOpts) write(t *TemplateOpts, data interface{}) error {
+ dir, fname, err := g.location(t, data)
+ if err != nil {
+ return fmt.Errorf("failed to resolve template location for template %s: %v", t.Name, err)
+ }
+
+ if t.SkipExists && fileExists(dir, fname) {
+ debugLog("skipping generation of %s because it already exists and skip_exist directive is set for %s",
+ filepath.Join(dir, fname), t.Name)
+ return nil
+ }
+
+ log.Printf("creating generated file %q in %q as %s", fname, dir, t.Name)
+ content, err := g.render(t, data)
+ if err != nil {
+ return fmt.Errorf("failed rendering template data for %s: %v", t.Name, err)
+ }
+
+ if dir != "" {
+ _, exists := os.Stat(dir)
+ if os.IsNotExist(exists) {
+ debugLog("creating directory %q for \"%s\"", dir, t.Name)
+ // Directory settings consistent with file privileges.
+ // Environment's umask may alter this setup
+ if e := os.MkdirAll(dir, 0755); e != nil {
+ return e
+ }
+ }
+ }
+
+ // Conditionally format the code, unless the user wants to skip
+ formatted := content
+ var writeerr error
+
+ if !t.SkipFormat {
+ formatted, err = g.LanguageOpts.FormatContent(filepath.Join(dir, fname), content)
+ if err != nil {
+ log.Printf("source formatting failed on template-generated source (%q for %s). Check that your template produces valid code", filepath.Join(dir, fname), t.Name)
+ writeerr = os.WriteFile(filepath.Join(dir, fname), content, 0644) // #nosec
+ if writeerr != nil {
+ return fmt.Errorf("failed to write (unformatted) file %q in %q: %v", fname, dir, writeerr)
+ }
+ log.Printf("unformatted generated source %q has been dumped for template debugging purposes. DO NOT build on this source!", fname)
+ return fmt.Errorf("source formatting on generated source %q failed: %v", t.Name, err)
+ }
+ }
+
+ writeerr = os.WriteFile(filepath.Join(dir, fname), formatted, 0644) // #nosec
+ if writeerr != nil {
+ return fmt.Errorf("failed to write file %q in %q: %v", fname, dir, writeerr)
+ }
+ return err
+}
+
+func fileName(in string) string {
+ ext := filepath.Ext(in)
+ return swag.ToFileName(strings.TrimSuffix(in, ext)) + ext
+}
+
+func (g *GenOpts) shouldRenderApp(t *TemplateOpts, _ *GenApp) bool {
+ switch swag.ToFileName(swag.ToGoName(t.Name)) {
+ case "main":
+ return g.IncludeMain
+ case "embedded_spec":
+ return !g.ExcludeSpec
+ default:
+ return true
+ }
+}
+
// shouldRenderOperations reports whether any operation-related artifact
// (handler, parameters or responses) has been requested for generation.
func (g *GenOpts) shouldRenderOperations() bool {
	return g.IncludeHandler || g.IncludeParameters || g.IncludeResponses
}
+
+func (g *GenOpts) renderApplication(app *GenApp) error {
+ log.Printf("rendering %d templates for application %s", len(g.Sections.Application), app.Name)
+ for _, tp := range g.Sections.Application {
+ templ := tp
+ if !g.shouldRenderApp(&templ, app) {
+ continue
+ }
+ if err := g.write(&templ, app); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (g *GenOpts) renderOperationGroup(gg *GenOperationGroup) error {
+ log.Printf("rendering %d templates for operation group %s", len(g.Sections.OperationGroups), g.Name)
+ for _, tp := range g.Sections.OperationGroups {
+ templ := tp
+ if !g.shouldRenderOperations() {
+ continue
+ }
+
+ if err := g.write(&templ, gg); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (g *GenOpts) renderOperation(gg *GenOperation) error {
+ log.Printf("rendering %d templates for operation %s", len(g.Sections.Operations), g.Name)
+ for _, tp := range g.Sections.Operations {
+ templ := tp
+ if !g.shouldRenderOperations() {
+ continue
+ }
+
+ if err := g.write(&templ, gg); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (g *GenOpts) renderDefinition(gg *GenDefinition) error {
+ log.Printf("rendering %d templates for model %s", len(g.Sections.Models), gg.Name)
+ for _, tp := range g.Sections.Models {
+ templ := tp
+ if !g.IncludeModel {
+ continue
+ }
+
+ if err := g.write(&templ, gg); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (g *GenOptsCommon) setTemplates() error {
+ if g.Template != "" {
+ // set contrib templates
+ if err := g.templates.LoadContrib(g.Template); err != nil {
+ return err
+ }
+ }
+
+ g.templates.SetAllowOverride(g.AllowTemplateOverride)
+
+ if g.TemplateDir != "" {
+ // set custom templates
+ if err := g.templates.LoadDir(g.TemplateDir); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
// defaultImports produces a default map for imports with models.
//
// The returned map is keyed by import alias, valued by import path. It always
// contains the models package (generated or pre-existing), plus the package
// hosting the principal type when that type lives outside the models package.
func (g *GenOpts) defaultImports() map[string]string {
	baseImport := g.LanguageOpts.baseImport(g.Target)
	defaultImports := make(map[string]string, 50)

	var modelsAlias, importPath string
	if g.ExistingModels == "" {
		// generated models
		importPath = path.Join(
			baseImport,
			g.LanguageOpts.ManglePackagePath(g.ModelPackage, defaultModelsTarget))
		modelsAlias = g.LanguageOpts.ManglePackageName(g.ModelPackage, defaultModelsTarget)
	} else {
		// external models: the provided package path is used as-is
		importPath = g.LanguageOpts.ManglePackagePath(g.ExistingModels, "")
		modelsAlias = path.Base(defaultModelsTarget)
	}
	defaultImports[modelsAlias] = importPath

	// resolve model representing an authenticated principal
	alias, _, target := g.resolvePrincipal()
	if alias == "" || target == g.ModelPackage || path.Base(target) == modelsAlias {
		// if principal is specified with the models generation package, do not import any extra package
		return defaultImports
	}

	if pth, _ := path.Split(target); pth != "" {
		// if principal is specified with a path, assume this is a fully qualified package and generate this import
		defaultImports[alias] = target
	} else {
		// if principal is specified with a relative path (no "/", e.g. internal.Principal), assume it is located in generated target
		defaultImports[alias] = path.Join(baseImport, target)
	}
	return defaultImports
}
+
+// initImports produces a default map for import with the specified root for operations
+func (g *GenOpts) initImports(operationsPackage string) map[string]string {
+ baseImport := g.LanguageOpts.baseImport(g.Target)
+
+ imports := make(map[string]string, 50)
+ imports[g.LanguageOpts.ManglePackageName(operationsPackage, defaultOperationsTarget)] = path.Join(
+ baseImport,
+ g.LanguageOpts.ManglePackagePath(operationsPackage, defaultOperationsTarget))
+ return imports
+}
+
+// PrincipalAlias returns an aliased type to the principal
+func (g *GenOpts) PrincipalAlias() string {
+ _, principal, _ := g.resolvePrincipal()
+ return principal
+}
+
// resolvePrincipal splits the configured Principal into three parts: the
// import alias, the aliased type expression and the package path.
//
// When the principal is not qualified by a package (no "."), the alias and
// package path are empty, meaning no extra import is required.
func (g *GenOpts) resolvePrincipal() (string, string, string) {
	dotLocation := strings.LastIndex(g.Principal, ".")
	if dotLocation < 0 {
		// unqualified principal (e.g. "interface{}"): no import needed
		return "", g.Principal, ""
	}

	// handle possible conflicts with injected principal package
	// NOTE(fred): we do not check here for conflicts with packages created from operation tags, only standard imports
	alias := deconflictPrincipal(importAlias(g.Principal[:dotLocation]))
	return alias, alias + g.Principal[dotLocation:], g.Principal[:dotLocation]
}
+
// fileExists reports whether target/name exists on disk.
//
// NOTE: any stat error other than "does not exist" (e.g. a permission error)
// is conservatively reported as "exists", mirroring the original behavior.
func fileExists(target, name string) bool {
	if _, err := os.Stat(filepath.Join(target, name)); err != nil {
		return !os.IsNotExist(err)
	}
	return true
}
+
+func gatherModels(specDoc *loads.Document, modelNames []string) (map[string]spec.Schema, error) {
+ modelNames = pruneEmpty(modelNames)
+ models, mnc := make(map[string]spec.Schema), len(modelNames)
+ defs := specDoc.Spec().Definitions
+
+ if mnc > 0 {
+ var unknownModels []string
+ for _, m := range modelNames {
+ _, ok := defs[m]
+ if !ok {
+ unknownModels = append(unknownModels, m)
+ }
+ }
+ if len(unknownModels) != 0 {
+ return nil, fmt.Errorf("unknown models: %s", strings.Join(unknownModels, ", "))
+ }
+ }
+ for k, v := range defs {
+ if mnc == 0 {
+ models[k] = v
+ }
+ for _, nm := range modelNames {
+ if k == nm {
+ models[k] = v
+ }
+ }
+ }
+ return models, nil
+}
+
+// titleOrDefault infers a name for the app from the title of the spec
+func titleOrDefault(specDoc *loads.Document, name, defaultName string) string {
+ if strings.TrimSpace(name) == "" {
+ if specDoc.Spec().Info != nil && strings.TrimSpace(specDoc.Spec().Info.Title) != "" {
+ name = specDoc.Spec().Info.Title
+ } else {
+ name = defaultName
+ }
+ }
+ return swag.ToGoName(name)
+}
+
+func mainNameOrDefault(specDoc *loads.Document, name, defaultName string) string {
+ // *_test won't do as main server name
+ return strings.TrimSuffix(titleOrDefault(specDoc, name, defaultName), "Test")
+}
+
+func appNameOrDefault(specDoc *loads.Document, name, defaultName string) string {
+ // *_test won't do as app names
+ name = strings.TrimSuffix(titleOrDefault(specDoc, name, defaultName), "Test")
+ if name == "" {
+ name = swag.ToGoName(defaultName)
+ }
+ return name
+}
+
// opRef tracks a single spec operation together with the synthetic key and
// the method/path it was found at.
type opRef struct {
	Method string          // HTTP method
	Path   string          // path pattern in the spec
	Key    string          // synthetic name derived from method and path
	ID     string          // operation ID (possibly rewritten for uniqueness)
	Op     *spec.Operation // a private copy of the spec operation
}

// opRefs implements sort.Interface, ordering by the synthetic Key.
type opRefs []opRef

func (o opRefs) Len() int           { return len(o) }
func (o opRefs) Swap(i, j int)      { o[i], o[j] = o[j], o[i] }
func (o opRefs) Less(i, j int) bool { return o[i].Key < o[j].Key }
+
// gatherOperations collects the operations of an analyzed spec, keyed by a
// unique name: the operation ID when available, otherwise a name derived
// from the method and path. When operationIDs is non-empty, only matching
// operations are retained.
func gatherOperations(specDoc *analysis.Spec, operationIDs []string) map[string]opRef {
	operationIDs = pruneEmpty(operationIDs)
	var oprefs opRefs

	for method, pathItem := range specDoc.Operations() {
		for path, operation := range pathItem {
			// work on a copy of the operation: its ID may be rewritten below
			vv := *operation
			oprefs = append(oprefs, opRef{
				Key:    swag.ToGoName(strings.ToLower(method) + " " + swag.ToHumanNameTitle(path)),
				Method: method,
				Path:   path,
				ID:     vv.ID,
				Op:     &vv,
			})
		}
	}

	// sort by synthetic key for stable iteration order
	sort.Sort(oprefs)

	operations := make(map[string]opRef)
	for _, opr := range oprefs {
		nm := opr.ID
		if nm == "" {
			// no operation ID: fall back to the method/path-derived key
			nm = opr.Key
		}

		// disambiguate a duplicated name with the method/path-derived key.
		// NOTE(review): with `&&`, an operation sharing either the method or the
		// path (but not both) with a previous one keeps the duplicate name and
		// overwrites the earlier entry below — confirm this is intended
		oo, found := operations[nm]
		if found && oo.Method != opr.Method && oo.Path != opr.Path {
			nm = opr.Key
		}
		if len(operationIDs) == 0 || swag.ContainsStrings(operationIDs, opr.ID) || swag.ContainsStrings(operationIDs, nm) {
			opr.ID = nm
			opr.Op.ID = nm
			operations[nm] = opr
		}
	}

	return operations
}
+
// pruneEmpty filters out empty strings from a slice, preserving order.
// A nil or all-empty input yields a nil result.
func pruneEmpty(in []string) []string {
	var kept []string
	for _, entry := range in {
		if entry == "" {
			continue
		}
		kept = append(kept, entry)
	}
	return kept
}
+
// trimBOM removes a leading UTF-8 byte order mark from in.
//
// The previous implementation used strings.Trim, which treats its second
// argument as a SET of characters: it would also strip stray 0xEF/0xBB/0xBF
// bytes, in any combination, from BOTH ends of the string. TrimPrefix removes
// only an actual leading BOM sequence.
func trimBOM(in string) string {
	return strings.TrimPrefix(in, "\xef\xbb\xbf")
}
+
// gatherSecuritySchemes produces a sorted representation from a map of spec security schemes
func gatherSecuritySchemes(securitySchemes map[string]spec.SecurityScheme, appName, principal, receiver string, nullable bool) (security GenSecuritySchemes) {
	for scheme, req := range securitySchemes {
		isOAuth2 := strings.ToLower(req.Type) == "oauth2"
		scopes := make([]string, 0, len(req.Scopes))
		genScopes := make([]GenSecurityScope, 0, len(req.Scopes))
		if isOAuth2 {
			// oauth2 only: collect scope names (sorted) and their descriptions
			for k, v := range req.Scopes {
				scopes = append(scopes, k)
				genScopes = append(genScopes, GenSecurityScope{Name: k, Description: v})
			}
			sort.Strings(scopes)
		}

		security = append(security, GenSecurityScheme{
			AppName:      appName,
			ID:           scheme,
			ReceiverName: receiver,
			Name:         req.Name,
			IsBasicAuth:  strings.ToLower(req.Type) == "basic",
			IsAPIKeyAuth: strings.ToLower(req.Type) == "apikey",
			IsOAuth2:     isOAuth2,
			Scopes:       scopes,
			ScopesDesc:   genScopes,
			Principal:    principal,
			Source:       req.In,
			// from original spec
			Description:      req.Description,
			Type:             strings.ToLower(req.Type),
			In:               req.In,
			Flow:             req.Flow,
			AuthorizationURL: req.AuthorizationURL,
			TokenURL:         req.TokenURL,
			Extensions:       req.Extensions,

			PrincipalIsNullable: nullable,
		})
	}
	// stable output: schemes are sorted (GenSecuritySchemes implements sort.Interface)
	sort.Sort(security)
	return
}
+
+// securityRequirements just clones the original SecurityRequirements from either the spec
+// or an operation, without any modification. This is used to generate documentation.
+func securityRequirements(orig []map[string][]string) (result []analysis.SecurityRequirement) {
+ for _, r := range orig {
+ for k, v := range r {
+ result = append(result, analysis.SecurityRequirement{Name: k, Scopes: v})
+ }
+ }
+ // TODO(fred): sort this for stable generation
+ return
+}
+
+// gatherExtraSchemas produces a sorted list of extra schemas.
+//
+// ExtraSchemas are inlined types rendered in the same model file.
+func gatherExtraSchemas(extraMap map[string]GenSchema) (extras GenSchemaList) {
+ var extraKeys []string
+ for k := range extraMap {
+ extraKeys = append(extraKeys, k)
+ }
+ sort.Strings(extraKeys)
+ for _, k := range extraKeys {
+ // figure out if top level validations are needed
+ p := extraMap[k]
+ p.HasValidations = shallowValidationLookup(p)
+ extras = append(extras, p)
+ }
+ return
+}
+
+func getExtraSchemes(ext spec.Extensions) []string {
+ if ess, ok := ext.GetStringSlice(xSchemes); ok {
+ return ess
+ }
+ return nil
+}
+
+func gatherURISchemes(swsp *spec.Swagger, operation spec.Operation) ([]string, []string) {
+ var extraSchemes []string
+ extraSchemes = append(extraSchemes, getExtraSchemes(operation.Extensions)...)
+ extraSchemes = concatUnique(getExtraSchemes(swsp.Extensions), extraSchemes)
+ sort.Strings(extraSchemes)
+
+ schemes := concatUnique(swsp.Schemes, operation.Schemes)
+ sort.Strings(schemes)
+
+ return schemes, extraSchemes
+}
+
// dumpData pretty-prints data as indented JSON on stdout.
func dumpData(data interface{}) error {
	out, err := json.MarshalIndent(data, "", " ")
	if err != nil {
		return err
	}
	fmt.Fprintln(os.Stdout, string(out))
	return nil
}
+
// importAlias derives a default import alias from the last segment of a
// package path.
func importAlias(pkg string) string {
	_, last := path.Split(pkg)
	return last
}
+
// concatUnique concatenates collections of strings with deduplication.
//
// The result preserves first-occurrence order across the input collections.
// The previous implementation collected items into a map and iterated it,
// yielding a nondeterministic order on every call and relying on callers to
// sort afterwards; the output is now deterministic.
func concatUnique(collections ...[]string) []string {
	seen := make(map[string]struct{})
	result := make([]string, 0)
	for _, collection := range collections {
		for _, item := range collection {
			if _, dup := seen[item]; dup {
				continue
			}
			seen[item] = struct{}{}
			result = append(result, item)
		}
	}
	return result
}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/spec.go b/vendor/github.com/go-swagger/go-swagger/generator/spec.go
new file mode 100644
index 000000000..e7399bb95
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/spec.go
@@ -0,0 +1,273 @@
+package generator
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+
+ "github.com/go-openapi/analysis"
+ swaggererrors "github.com/go-openapi/errors"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+
+ yamlv2 "gopkg.in/yaml.v2"
+)
+
// validateAndFlattenSpec loads the spec document, optionally validates it,
// then flattens $ref's according to the configured FlattenOpts, returning
// the preprocessed document.
func (g *GenOpts) validateAndFlattenSpec() (*loads.Document, error) {
	// Load spec document
	specDoc, err := loads.Spec(g.Spec)
	if err != nil {
		return nil, err
	}

	// If accepts definitions only, add dummy swagger header to pass validation
	if g.AcceptDefinitionsOnly {
		specDoc, err = applyDefaultSwagger(specDoc)
		if err != nil {
			return nil, err
		}
	}

	// Validate if needed
	if g.ValidateSpec {
		log.Printf("validating spec %v", g.Spec)
		validationErrors := validate.Spec(specDoc, strfmt.Default)
		if validationErrors != nil {
			// aggregate all validation messages into a single error
			str := fmt.Sprintf("The swagger spec at %q is invalid against swagger specification %s. see errors :\n",
				g.Spec, specDoc.Version())
			for _, desc := range validationErrors.(*swaggererrors.CompositeError).Errors {
				str += fmt.Sprintf("- %s\n", desc)
			}
			return nil, errors.New(str)
		}
		// TODO(fredbi): due to uncontrolled $ref state in spec, we need to reload the spec atm, or flatten won't
		// work properly (validate expansion alters the $ref cache in go-openapi/spec)
		// NOTE(review): the reload error is deliberately ignored — the same file
		// was loaded successfully a few lines above
		specDoc, _ = loads.Spec(g.Spec)
	}

	// Flatten spec
	//
	// Some preprocessing is required before codegen
	//
	// This ensures at least that $ref's in the spec document are canonical,
	// i.e all $ref are local to this file and point to some uniquely named definition.
	//
	// Default option is to ensure minimal flattening of $ref, bundling remote $refs and relocating arbitrary JSON
	// pointers as definitions.
	// This preprocessing may introduce duplicate names (e.g. remote $ref with same name). In this case, a definition
	// suffixed with "OAIGen" is produced.
	//
	// Full flattening option farther transforms the spec by moving every complex object (e.g. with some properties)
	// as a standalone definition.
	//
	// Eventually, an "expand spec" option is available. It is essentially useful for testing purposes.
	//
	// NOTE(fredbi): spec expansion may produce some unsupported constructs and is not yet protected against the
	// following cases:
	//  - polymorphic types generation may fail with expansion (expand destructs the reuse intent of the $ref in allOf)
	//  - name duplicates may occur and result in compilation failures
	//
	// The right place to fix these shortcomings is go-openapi/analysis.

	g.FlattenOpts.BasePath = specDoc.SpecFilePath()
	g.FlattenOpts.Spec = analysis.New(specDoc.Spec())

	g.printFlattenOpts()

	if err = analysis.Flatten(*g.FlattenOpts); err != nil {
		return nil, err
	}

	// yields the preprocessed spec document
	return specDoc, nil
}
+
+func (g *GenOpts) analyzeSpec() (*loads.Document, *analysis.Spec, error) {
+ // load, validate and flatten
+ specDoc, err := g.validateAndFlattenSpec()
+ if err != nil {
+ return nil, nil, err
+ }
+
+ // spec preprocessing option
+ if g.PropertiesSpecOrder {
+ g.Spec = WithAutoXOrder(g.Spec)
+ specDoc, err = loads.Spec(g.Spec)
+ if err != nil {
+ return nil, nil, err
+ }
+ }
+
+ // analyze the spec
+ analyzed := analysis.New(specDoc.Spec())
+
+ return specDoc, analyzed, nil
+}
+
+func (g *GenOpts) printFlattenOpts() {
+ var preprocessingOption string
+ switch {
+ case g.FlattenOpts.Expand:
+ preprocessingOption = "expand"
+ case g.FlattenOpts.Minimal:
+ preprocessingOption = "minimal flattening"
+ default:
+ preprocessingOption = "full flattening"
+ }
+ log.Printf("preprocessing spec with option: %s", preprocessingOption)
+}
+
// findSwaggerSpec fetches a default swagger spec if none is provided
func findSwaggerSpec(nm string) (string, error) {
	candidates := []string{"swagger.json", "swagger.yml", "swagger.yaml"}
	if nm != "" {
		// an explicit name disables the default candidates
		candidates = []string{nm}
	}

	for _, candidate := range candidates {
		info, err := os.Stat(candidate)
		if os.IsNotExist(err) {
			continue
		}
		if err != nil {
			return "", err
		}
		if info.IsDir() {
			return "", fmt.Errorf("%s is a directory", candidate)
		}
		// first existing regular file wins
		return candidate, nil
	}
	return "", errors.New("couldn't find a swagger spec")
}
+
// WithAutoXOrder amends the spec to specify property order as they appear
// in the spec (supports yaml documents only).
//
// The amended spec is written to a temporary file and its path is returned.
// NOTE: any load/parse/write failure panics rather than returning an error.
func WithAutoXOrder(specPath string) string {
	// lookFor returns the MapSlice value held under key in ele, when present
	lookFor := func(ele interface{}, key string) (yamlv2.MapSlice, bool) {
		if slice, ok := ele.(yamlv2.MapSlice); ok {
			for _, v := range slice {
				if v.Key == key {
					if slice, ok := v.Value.(yamlv2.MapSlice); ok {
						return slice, ok
					}
				}
			}
		}
		return nil, false
	}

	// addXOrder recursively stamps each property with an x-order extension
	// reflecting its position in the document
	var addXOrder func(interface{})
	addXOrder = func(element interface{}) {
		if props, ok := lookFor(element, "properties"); ok {
			for i, prop := range props {
				if pSlice, ok := prop.Value.(yamlv2.MapSlice); ok {
					isObject := false
					xOrderIndex := -1 // find if x-order already exists

					// NOTE: the inner i shadows the property index; the shadow
					// ends with this loop, so the Value: i below is the outer index
					for i, v := range pSlice {
						if v.Key == "type" && v.Value == object {
							isObject = true
						}
						if v.Key == xOrder {
							xOrderIndex = i
							break
						}
					}

					if xOrderIndex > -1 { // override existing x-order
						pSlice[xOrderIndex] = yamlv2.MapItem{Key: xOrder, Value: i}
					} else { // append new x-order
						pSlice = append(pSlice, yamlv2.MapItem{Key: xOrder, Value: i})
					}
					prop.Value = pSlice
					props[i] = prop

					// objects may hold nested properties: recurse
					if isObject {
						addXOrder(pSlice)
					}
				}
			}
		}
	}

	data, err := swag.LoadFromFileOrHTTP(specPath)
	if err != nil {
		panic(err)
	}

	yamlDoc, err := BytesToYAMLv2Doc(data)
	if err != nil {
		panic(err)
	}

	// stamp every definition, then top-level properties
	if defs, ok := lookFor(yamlDoc, "definitions"); ok {
		for _, def := range defs {
			addXOrder(def.Value)
		}
	}

	addXOrder(yamlDoc)

	out, err := yamlv2.Marshal(yamlDoc)
	if err != nil {
		panic(err)
	}

	// write the amended spec to a temporary location
	tmpDir, err := os.MkdirTemp("", "go-swagger-")
	if err != nil {
		panic(err)
	}

	tmpFile := filepath.Join(tmpDir, filepath.Base(specPath))
	if err := os.WriteFile(tmpFile, out, 0600); err != nil {
		panic(err)
	}
	return tmpFile
}
+
// BytesToYAMLv2Doc converts a byte slice into a YAML document, preserving the
// key order present in the document (yaml.v2 MapSlice).
//
// It errors when the document is not a YAML mapping.
func BytesToYAMLv2Doc(data []byte) (interface{}, error) {
	var canary map[interface{}]interface{} // validate this is an object and not a different type
	if err := yamlv2.Unmarshal(data, &canary); err != nil {
		return nil, err
	}

	var document yamlv2.MapSlice // preserve order that is present in the document
	if err := yamlv2.Unmarshal(data, &document); err != nil {
		return nil, err
	}
	return document, nil
}
+
+func applyDefaultSwagger(doc *loads.Document) (*loads.Document, error) {
+ // bake a minimal swagger spec to pass validation
+ swspec := doc.Spec()
+ if swspec.Swagger == "" {
+ swspec.Swagger = "2.0"
+ }
+ if swspec.Info == nil {
+ info := new(spec.Info)
+ info.Version = "0.0.0"
+ info.Title = "minimal"
+ swspec.Info = info
+ }
+ if swspec.Paths == nil {
+ swspec.Paths = &spec.Paths{}
+ }
+ // rewrite the document with the new addition
+ jazon, err := json.Marshal(swspec)
+ if err != nil {
+ return nil, err
+ }
+ return loads.Analyzed(jazon, swspec.Swagger)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/structs.go b/vendor/github.com/go-swagger/go-swagger/generator/structs.go
new file mode 100644
index 000000000..522be1446
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/structs.go
@@ -0,0 +1,803 @@
+package generator
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/spec"
+)
+
// GenCommon contains common properties needed across
// definitions, app and operations
// TargetImportPath may be used by templates to import other (possibly
// generated) packages in the generation path (e.g. relative to GOPATH).
// TargetImportPath is NOT used by standard templates.
type GenCommon struct {
	Copyright        string // copyright header injected in generated files
	TargetImportPath string // base import path of the generation target
}
+
// GenDefinition contains all the properties to generate a
// definition from a swagger spec
type GenDefinition struct {
	GenCommon
	GenSchema
	Package        string            // package hosting the generated model
	Imports        map[string]string // resolved imports, keyed by alias
	DefaultImports map[string]string // default imports, keyed by alias
	ExtraSchemas   GenSchemaList     // inlined types rendered in the same file
	DependsOn      []string
	External       bool
}
+
// GenDefinitions represents a list of operations to generate
// this implements a sort by operation id
type GenDefinitions []GenDefinition

func (g GenDefinitions) Len() int           { return len(g) }
func (g GenDefinitions) Less(i, j int) bool { return g[i].Name < g[j].Name }
func (g GenDefinitions) Swap(i, j int)      { g[i], g[j] = g[j], g[i] }
+
// GenSchemaList is a list of schemas for generation.
//
// It can be sorted by name to get a stable struct layout for
// version control and such
type GenSchemaList []GenSchema

// GenSchema contains all the information needed to generate the code
// for a schema
type GenSchema struct {
	resolvedType
	sharedValidations
	Example                    string // JSON representation of the example value
	OriginalName               string // the property name as spelled in the spec (used for the json tag)
	Name                       string
	Suffix                     string
	Path                       string
	ValueExpression            string
	IndexVar                   string
	KeyVar                     string
	Title                      string
	Description                string
	Location                   string
	ReceiverName               string
	Items                      *GenSchema
	AllowsAdditionalItems      bool
	HasAdditionalItems         bool
	AdditionalItems            *GenSchema
	Object                     *GenSchema
	XMLName                    string
	CustomTag                  string
	Properties                 GenSchemaList
	AllOf                      GenSchemaList
	HasAdditionalProperties    bool
	IsAdditionalProperties     bool
	AdditionalProperties       *GenSchema
	StrictAdditionalProperties bool
	ReadOnly                   bool
	IsVirtual                  bool
	IsBaseType                 bool
	HasBaseType                bool
	IsSubType                  bool
	IsExported                 bool
	// polymorphic type support
	DiscriminatorField string
	DiscriminatorValue string
	Discriminates      map[string]string
	Parents            []string
	IncludeValidator   bool
	IncludeModel       bool
	Default            interface{}
	WantsMarshalBinary bool // do we generate MarshalBinary interface?
	StructTags         []string
	ExtraImports       map[string]string // non-standard imports detected when using external types
	ExternalDocs       *spec.ExternalDocumentation
}
+
+func (g GenSchema) renderMarshalTag() string {
+ if g.HasBaseType {
+ return "-"
+ }
+
+ var result strings.Builder
+
+ result.WriteString(g.OriginalName)
+
+ if !g.Required && g.IsEmptyOmitted {
+ result.WriteString(",omitempty")
+ }
+
+ if g.IsJSONString {
+ result.WriteString(",string")
+ }
+
+ return result.String()
+}
+
// PrintTags takes care of rendering tags for a struct field
func (g GenSchema) PrintTags() string {
	tags := make(map[string]string, 3)
	orderedTags := make([]string, 0, 3)

	// the json tag always comes first
	tags["json"] = g.renderMarshalTag()
	orderedTags = append(orderedTags, "json")

	if len(g.XMLName) > 0 {
		if !g.Required && g.IsEmptyOmitted {
			tags["xml"] = g.XMLName + ",omitempty"
		} else {
			tags["xml"] = g.XMLName
		}
		orderedTags = append(orderedTags, "xml")
	}

	// Add extra struct tags, only if the tag hasn't already been set, i.e. example.
	// Extra struct tags have the same value has the `json` tag.
	for _, tag := range g.StructTags {
		if _, exists := tags[tag]; exists {
			// dedupe
			continue
		}

		switch {
		case tag == "example" && len(g.Example) > 0:
			// only add example tag if it's contained in the struct tags
			tags["example"] = g.Example // json representation of the example object
		case tag == "description" && len(g.Description) > 0:
			tags["description"] = g.Description
		default:
			tags[tag] = tags["json"]
		}

		orderedTags = append(orderedTags, tag)
	}

	// Assemble the tags in key value pairs with the value properly quoted.
	kvPairs := make([]string, 0, len(orderedTags)+1)
	for _, key := range orderedTags {
		kvPairs = append(kvPairs, fmt.Sprintf("%s:%s", key, strconv.Quote(tags[key])))
	}

	// a user-provided custom tag is appended verbatim, after all generated tags
	if len(g.CustomTag) > 0 {
		kvPairs = append(kvPairs, g.CustomTag)
	}

	// Join the key value pairs by a space.
	completeTag := strings.Join(kvPairs, " ")

	// If the values contain a backtick, we cannot render the tag using backticks because Go does not support
	// escaping backticks in raw string literals.
	valuesHaveBacktick := false
	for _, value := range tags {
		if !strconv.CanBackquote(value) {
			valuesHaveBacktick = true
			break
		}
	}

	if !valuesHaveBacktick {
		return fmt.Sprintf("`%s`", completeTag)
	}

	// We have to escape the tag again to put it in a literal with double quotes as the tag format uses double quotes.
	return strconv.Quote(completeTag)
}
+
+// UnderlyingType tells the go type or the aliased go type
+func (g GenSchema) UnderlyingType() string {
+ if g.IsAliased {
+ return g.AliasedType
+ }
+ return g.GoType
+}
+
+// ToString returns a string conversion expression for the schema
+func (g GenSchema) ToString() string {
+ return g.resolvedType.ToString(g.ValueExpression)
+}
+
+func (g GenSchemaList) Len() int { return len(g) }
+func (g GenSchemaList) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenSchemaList) Less(i, j int) bool {
+ a, okA := g[i].Extensions[xOrder].(float64)
+ b, okB := g[j].Extensions[xOrder].(float64)
+
+ // If both properties have x-order defined, then the one with lower x-order is smaller
+ if okA && okB {
+ return a < b
+ }
+
+ // If only the first property has x-order defined, then it is smaller
+ if okA {
+ return true
+ }
+
+ // If only the second property has x-order defined, then it is smaller
+ if okB {
+ return false
+ }
+
+ // If neither property has x-order defined, then the one with lower lexicographic name is smaller
+ return g[i].Name < g[j].Name
+}
+
// sharedValidations groups the validation properties shared by schemas,
// parameters, headers and items.
type sharedValidations struct {
	spec.SchemaValidations

	HasValidations        bool          // some validation code must be generated
	HasContextValidations bool          // some context-aware validation code must be generated
	Required              bool
	HasSliceValidations   bool          // validations apply to slice items
	ItemsEnum             []interface{} // enum constraint on items

	// NOTE: "patternProperties" and "dependencies" not supported by Swagger 2.0
}
+
// GenResponse represents a response object for code generation
type GenResponse struct {
	Package       string
	ModelsPackage string
	ReceiverName  string
	Name          string
	Description   string

	IsSuccess bool

	Code               int // HTTP status code
	Method             string
	Path               string
	Headers            GenHeaders
	Schema             *GenSchema
	AllowsForStreaming bool

	Imports        map[string]string
	DefaultImports map[string]string

	Extensions map[string]interface{}

	StrictResponders bool
	OperationName    string
	Examples         GenResponseExamples
}
+
// GenResponseExamples is a sortable collection []GenResponseExample
type GenResponseExamples []GenResponseExample

func (g GenResponseExamples) Len() int           { return len(g) }
func (g GenResponseExamples) Swap(i, j int)      { g[i], g[j] = g[j], g[i] }
func (g GenResponseExamples) Less(i, j int) bool { return g[i].MediaType < g[j].MediaType }

// GenResponseExample captures an example provided for a response for some mime type
type GenResponseExample struct {
	MediaType string
	Example   interface{}
}
+
// GenHeader represents a header on a response for code generation
type GenHeader struct {
	resolvedType
	sharedValidations

	Package      string
	ReceiverName string
	IndexVar     string

	ID              string
	Name            string
	Path            string
	ValueExpression string

	Title       string
	Description string
	Default     interface{}
	HasDefault  bool

	CollectionFormat string

	// nesting for array headers
	Child  *GenItems
	Parent *GenItems

	Converter string // string-to-type conversion function
	Formatter string // type-to-string formatting function

	ZeroValue string
}
+
+// ItemsDepth returns a string "items.items..." with as many items as the level of nesting of the array.
+// For a header objects it always returns "".
+func (h *GenHeader) ItemsDepth() string {
+	// NOTE: this is currently used by templates to generate explicit comments in nested structures
+	return ""
+}
+
+// ToString returns a string conversion expression for the header,
+// delegating to the embedded resolvedType with this header's value expression.
+func (h GenHeader) ToString() string {
+	return h.resolvedType.ToString(h.ValueExpression)
+}
+
+// GenHeaders is a sorted collection of headers for codegen
+type GenHeaders []GenHeader
+
+// sort.Interface implementation: headers are ordered by name
+func (g GenHeaders) Len() int { return len(g) }
+func (g GenHeaders) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenHeaders) Less(i, j int) bool { return g[i].Name < g[j].Name }
+
+// HasSomeDefaults reports whether at least one header in the collection
+// carries a default value.
+func (g GenHeaders) HasSomeDefaults() bool {
+	// NOTE: this is currently used by templates to avoid empty constructs
+	for i := range g {
+		if g[i].HasDefault {
+			return true
+		}
+	}
+	return false
+}
+
+// GenParameter is used to represent
+// a parameter or a header for code generation.
+type GenParameter struct {
+	resolvedType
+	sharedValidations
+
+	ID string
+	Name string
+	ModelsPackage string
+	Path string
+	ValueExpression string // expression used by templates to reference the parameter value
+	IndexVar string
+	KeyVar string
+	ReceiverName string
+	Location string // swagger "in" location: query, path, formData, header or body
+	Title string
+	Description string
+	Converter string
+	Formatter string
+
+	Schema *GenSchema
+
+	CollectionFormat string
+
+	// links to nested items for collection-typed parameters
+	Child *GenItems
+	Parent *GenItems
+
+	// Unused
+	// BodyParam *GenParameter
+
+	Default interface{}
+	HasDefault bool
+	ZeroValue string
+	AllowEmptyValue bool
+
+	// validation strategy for Body params, which may mix model and simple constructs.
+	// Distinguish the following cases:
+	// - HasSimpleBodyParams: body is an inline simple type
+	// - HasModelBodyParams: body is a model objectd
+	// - HasSimpleBodyItems: body is an inline array of simple type
+	// - HasModelBodyItems: body is an array of model objects
+	// - HasSimpleBodyMap: body is a map of simple objects (possibly arrays)
+	// - HasModelBodyMap: body is a map of model objects
+	HasSimpleBodyParams bool
+	HasModelBodyParams bool
+	HasSimpleBodyItems bool
+	HasModelBodyItems bool
+	HasSimpleBodyMap bool
+	HasModelBodyMap bool
+
+	Extensions map[string]interface{}
+}
+
+// IsQueryParam returns true when this parameter is a query param
+// (all Is*Param predicates below test the swagger "in" location).
+func (g *GenParameter) IsQueryParam() bool {
+	return g.Location == "query"
+}
+
+// IsPathParam returns true when this parameter is a path param
+func (g *GenParameter) IsPathParam() bool {
+	return g.Location == "path"
+}
+
+// IsFormParam returns true when this parameter is a form param
+func (g *GenParameter) IsFormParam() bool {
+	return g.Location == "formData"
+}
+
+// IsHeaderParam returns true when this parameter is a header param
+func (g *GenParameter) IsHeaderParam() bool {
+	return g.Location == "header"
+}
+
+// IsBodyParam returns true when this parameter is a body param
+func (g *GenParameter) IsBodyParam() bool {
+	return g.Location == "body"
+}
+
+// IsFileParam returns true when this parameter is a file param
+// (note: tested on the swagger type, not the location)
+func (g *GenParameter) IsFileParam() bool {
+	return g.SwaggerType == "file"
+}
+
+// ItemsDepth returns a string "items.items..." with as many items as the level of nesting of the array.
+// For a parameter object, it always returns "".
+func (g *GenParameter) ItemsDepth() string {
+	// NOTE: this is currently used by templates to generate explicit comments in nested structures
+	return ""
+}
+
+// UnderlyingType tells the go type or the aliased go type
+func (g GenParameter) UnderlyingType() string {
+	return g.GoType
+}
+
+// ToString returns a string conversion expression for the parameter,
+// delegating to the embedded resolvedType with this parameter's value expression.
+func (g GenParameter) ToString() string {
+	return g.resolvedType.ToString(g.ValueExpression)
+}
+
+// GenParameters represents a sorted parameter collection
+type GenParameters []GenParameter
+
+// sort.Interface implementation: parameters are ordered by name
+func (g GenParameters) Len() int { return len(g) }
+func (g GenParameters) Less(i, j int) bool { return g[i].Name < g[j].Name }
+func (g GenParameters) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+
+// HasSomeDefaults reports whether at least one parameter in the collection
+// carries a default value.
+func (g GenParameters) HasSomeDefaults() bool {
+	// NOTE: this is currently used by templates to avoid empty constructs
+	for i := range g {
+		if g[i].HasDefault {
+			return true
+		}
+	}
+	return false
+}
+
+// GenItems represents the collection items for a collection parameter
+type GenItems struct {
+	sharedValidations
+	resolvedType
+
+	Name string
+	Path string
+	ValueExpression string
+	CollectionFormat string
+	// Child/Parent link the nesting levels of a multi-dimensional collection
+	Child *GenItems
+	Parent *GenItems
+	Converter string
+	Formatter string
+
+	Location string
+	IndexVar string
+	KeyVar string
+
+	// instructs generator to skip the splitting and parsing from CollectionFormat
+	SkipParse bool
+	// instructs generator that some nested structure needs an higher level loop index
+	NeedsIndex bool
+}
+
+// ItemsDepth returns a string "items.items..." with one "items." segment per
+// level of nesting of the array, counted by walking up the Parent chain.
+func (g *GenItems) ItemsDepth() string {
+	// NOTE: this is currently used by templates to generate explicit comments in nested structures
+	depth := 1
+	for node := g; node.Parent != nil; node = node.Parent {
+		depth++
+	}
+	return strings.Repeat("items.", depth)
+}
+
+// UnderlyingType tells the go type or the aliased go type
+func (g GenItems) UnderlyingType() string {
+	return g.GoType
+}
+
+// ToString returns a string conversion expression for the item,
+// delegating to the embedded resolvedType with this item's value expression.
+func (g GenItems) ToString() string {
+	return g.resolvedType.ToString(g.ValueExpression)
+}
+
+// GenOperationGroup represents a named (tagged) group of operations
+type GenOperationGroup struct {
+	GenCommon
+	Name string
+	Operations GenOperations
+
+	Summary string
+	Description string
+	Imports map[string]string
+	DefaultImports map[string]string
+	RootPackage string
+	GenOpts *GenOpts
+	PackageAlias string
+}
+
+// GenOperationGroups is a sorted collection of operation groups
+type GenOperationGroups []GenOperationGroup
+
+// sort.Interface implementation: groups are ordered by name
+func (g GenOperationGroups) Len() int { return len(g) }
+func (g GenOperationGroups) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenOperationGroups) Less(i, j int) bool { return g[i].Name < g[j].Name }
+
+// GenStatusCodeResponses a container for status code responses
+type GenStatusCodeResponses []GenResponse
+
+// sort.Interface implementation: responses are ordered by status code
+func (g GenStatusCodeResponses) Len() int { return len(g) }
+func (g GenStatusCodeResponses) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenStatusCodeResponses) Less(i, j int) bool { return g[i].Code < g[j].Code }
+
+// MarshalJSON marshals these responses to json, as an object keyed by
+// stringified status code, with keys emitted in ascending code order.
+//
+// This is used by DumpData.
+func (g GenStatusCodeResponses) MarshalJSON() ([]byte, error) {
+	if g == nil {
+		return nil, nil
+	}
+	// sort a copy so the receiver is left untouched and output is stable
+	ordered := make(GenStatusCodeResponses, len(g))
+	copy(ordered, g)
+	sort.Sort(ordered)
+
+	var out bytes.Buffer
+	out.WriteRune('{')
+	for i := range ordered {
+		encoded, err := json.Marshal(ordered[i])
+		if err != nil {
+			return nil, err
+		}
+		if i > 0 {
+			out.WriteRune(',')
+		}
+		out.WriteString(fmt.Sprintf("%q:", strconv.Itoa(ordered[i].Code)))
+		out.Write(encoded)
+	}
+	out.WriteRune('}')
+	return out.Bytes(), nil
+}
+
+// UnmarshalJSON unmarshals this GenStatusCodeResponses from json:
+// a map keyed by status code string, flattened to a code-sorted slice.
+func (g *GenStatusCodeResponses) UnmarshalJSON(data []byte) error {
+	var byCode map[string]GenResponse
+	if err := json.Unmarshal(data, &byCode); err != nil {
+		return err
+	}
+	// keep the nil zero value when the input object is empty
+	var responses GenStatusCodeResponses
+	for _, resp := range byCode {
+		responses = append(responses, resp)
+	}
+	sort.Sort(responses)
+	*g = responses
+	return nil
+}
+
+// GenOperation represents an operation for code generation
+type GenOperation struct {
+	GenCommon
+	Package string
+	ReceiverName string
+	Name string
+	Summary string
+	Description string
+	Method string
+	Path string
+	BasePath string
+	Tags []string
+	UseTags bool
+	RootPackage string
+
+	Imports map[string]string
+	DefaultImports map[string]string
+	ExtraSchemas GenSchemaList
+	PackageAlias string
+
+	// security configuration resolved for this operation
+	Authorized bool
+	Security []GenSecurityRequirements // resolved security requirements for the operation
+	SecurityDefinitions GenSecuritySchemes
+	SecurityRequirements []analysis.SecurityRequirement // original security requirements as per the spec (for doc)
+	Principal string
+	PrincipalIsNullable bool
+
+	// responses of the operation
+	SuccessResponse *GenResponse
+	SuccessResponses []GenResponse
+	Responses GenStatusCodeResponses
+	DefaultResponse *GenResponse
+
+	// parameters, plus per-location views and presence flags used by templates
+	Params GenParameters
+	QueryParams GenParameters
+	PathParams GenParameters
+	HeaderParams GenParameters
+	FormParams GenParameters
+	HasQueryParams bool
+	HasPathParams bool
+	HasHeaderParams bool
+	HasFormParams bool
+	HasFormValueParams bool
+	HasFileParams bool
+	HasBodyParams bool
+	HasStreamingResponse bool
+
+	Schemes []string
+	ExtraSchemes []string
+	SchemeOverrides []string // original scheme overrides for operation, as per spec (for doc)
+	ExtraSchemeOverrides []string // original extra scheme overrides for operation, as per spec (for doc)
+	ProducesMediaTypes []string
+	ConsumesMediaTypes []string
+	TimeoutName string
+
+	Extensions map[string]interface{}
+
+	StrictResponders bool
+	ExternalDocs *spec.ExternalDocumentation
+	Produces []string // original produces for operation (for doc)
+	Consumes []string // original consumes for operation (for doc)
+}
+
+// GenOperations represents a list of operations to generate
+// this implements a sort by operation id
+type GenOperations []GenOperation
+
+func (g GenOperations) Len() int { return len(g) }
+func (g GenOperations) Less(i, j int) bool { return g[i].Name < g[j].Name }
+func (g GenOperations) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+
+// GenApp represents all the meta data needed to generate an application
+// from a swagger spec
+type GenApp struct {
+	GenCommon
+	APIPackage string
+	ServerPackageAlias string
+	ImplementationPackageAlias string
+	APIPackageAlias string
+	Package string
+	ReceiverName string
+	Name string
+	Principal string
+	PrincipalIsNullable bool
+	DefaultConsumes string
+	DefaultProduces string
+	Host string
+	BasePath string
+	// spec-level metadata carried over for documentation templates
+	Info *spec.Info
+	ExternalDocs *spec.ExternalDocumentation
+	Tags []spec.Tag
+	Imports map[string]string
+	DefaultImports map[string]string
+	Schemes []string
+	ExtraSchemes []string
+	Consumes GenSerGroups
+	Produces GenSerGroups
+	SecurityDefinitions GenSecuritySchemes
+	SecurityRequirements []analysis.SecurityRequirement // original security requirements as per the spec (for doc)
+	Models []GenDefinition
+	Operations GenOperations
+	OperationGroups GenOperationGroups
+	// SwaggerJSON is the original spec rendered as an embeddable string
+	SwaggerJSON string
+	// Embedded specs: this is important for when the generated server adds routes.
+	// NOTE: there is a distinct advantage to having this in runtime rather than generated code.
+	// We are not ever going to generate the router.
+	// If embedding spec is an issue (e.g. memory usage), this can be excluded with the --exclude-spec
+	// generation option. Alternative methods to serve spec (e.g. from disk, ...) may be implemented by
+	// adding a middleware to the generated API.
+	FlatSwaggerJSON string
+	ExcludeSpec bool
+	GenOpts *GenOpts
+}
+
+// UseGoStructFlags returns true when no strategy is specified or it is set to "go-flags"
+func (g *GenApp) UseGoStructFlags() bool {
+	if g.GenOpts == nil {
+		return true
+	}
+	switch g.GenOpts.FlagStrategy {
+	case "", "go-flags":
+		return true
+	default:
+		return false
+	}
+}
+
+// UsePFlags returns true when the flag strategy is set to pflag
+func (g *GenApp) UsePFlags() bool {
+	if g.GenOpts == nil {
+		return false
+	}
+	return strings.HasPrefix(g.GenOpts.FlagStrategy, "pflag")
+}
+
+// UseFlags returns true when the flag strategy is set to flag
+func (g *GenApp) UseFlags() bool {
+	if g.GenOpts == nil {
+		return false
+	}
+	return strings.HasPrefix(g.GenOpts.FlagStrategy, "flag")
+}
+
+// UseIntermediateMode for https://wiki.mozilla.org/Security/Server_Side_TLS#Intermediate_compatibility_.28default.29
+func (g *GenApp) UseIntermediateMode() bool {
+	if g.GenOpts == nil {
+		return false
+	}
+	return g.GenOpts.CompatibilityMode == "intermediate"
+}
+
+// UseModernMode for https://wiki.mozilla.org/Security/Server_Side_TLS#Modern_compatibility
+func (g *GenApp) UseModernMode() bool {
+	if g.GenOpts == nil {
+		return true
+	}
+	mode := g.GenOpts.CompatibilityMode
+	return mode == "" || mode == "modern"
+}
+
+// GenSerGroups sorted representation of serializer groups
+type GenSerGroups []GenSerGroup
+
+// sort.Interface implementation: groups are ordered by name
+func (g GenSerGroups) Len() int { return len(g) }
+func (g GenSerGroups) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenSerGroups) Less(i, j int) bool { return g[i].Name < g[j].Name }
+
+// GenSerGroup represents a group of serializers: this links a serializer to a list of
+// prioritized media types (mime).
+type GenSerGroup struct {
+	GenSerializer
+
+	// All media types for this serializer. The redundant representation allows for easier use in templates
+	AllSerializers GenSerializers
+}
+
+// GenSerializers sorted representation of serializers
+type GenSerializers []GenSerializer
+
+// sort.Interface implementation: serializers are ordered by media type
+func (g GenSerializers) Len() int { return len(g) }
+func (g GenSerializers) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenSerializers) Less(i, j int) bool { return g[i].MediaType < g[j].MediaType }
+
+// GenSerializer represents a single serializer for a particular media type
+type GenSerializer struct {
+	AppName string // Application name
+	ReceiverName string
+	Name string // Name of the Producer/Consumer (e.g. json, yaml, txt, bin)
+	MediaType string // mime
+	Implementation string // func implementing the Producer/Consumer
+	Parameters []string // parameters supported by this serializer
+}
+
+// GenSecurityScheme represents a security scheme for code generation
+type GenSecurityScheme struct {
+	AppName string
+	ID string
+	Name string
+	ReceiverName string
+	// exactly which auth flavor this scheme uses
+	IsBasicAuth bool
+	IsAPIKeyAuth bool
+	IsOAuth2 bool
+	Scopes []string
+	Source string
+	Principal string
+	PrincipalIsNullable bool
+
+	// from spec.SecurityScheme
+	Description string
+	Type string
+	In string
+	Flow string
+	AuthorizationURL string
+	TokenURL string
+	Extensions map[string]interface{}
+	ScopesDesc []GenSecurityScope
+}
+
+// GenSecuritySchemes sorted representation of serializers
+type GenSecuritySchemes []GenSecurityScheme
+
+// sort.Interface implementation: schemes are ordered by ID
+func (g GenSecuritySchemes) Len() int { return len(g) }
+func (g GenSecuritySchemes) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenSecuritySchemes) Less(i, j int) bool { return g[i].ID < g[j].ID }
+
+// GenSecurityRequirement represents a security requirement for an operation
+type GenSecurityRequirement struct {
+	Name string
+	Scopes []string
+}
+
+// GenSecurityScope represents a scope descriptor for an OAuth2 security scheme
+type GenSecurityScope struct {
+	Name string
+	Description string
+}
+
+// GenSecurityRequirements represents a compounded security requirement specification.
+// In a []GenSecurityRequirements complete requirements specification,
+// outer elements are interpreted as optional requirements (OR), and
+// inner elements are interpreted as jointly required (AND).
+type GenSecurityRequirements []GenSecurityRequirement
+
+// sort.Interface implementation: requirements are ordered by name
+func (g GenSecurityRequirements) Len() int { return len(g) }
+func (g GenSecurityRequirements) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenSecurityRequirements) Less(i, j int) bool { return g[i].Name < g[j].Name }
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/support.go b/vendor/github.com/go-swagger/go-swagger/generator/support.go
new file mode 100644
index 000000000..df3996df4
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/support.go
@@ -0,0 +1,546 @@
+// Copyright 2015 go-swagger maintainers
+
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "bytes"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log"
+ "path"
+ "path/filepath"
+ "sort"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+// GenerateServer generates a server application.
+// modelNames and operationIDs restrict generation to a subset of the spec;
+// empty slices select everything.
+func GenerateServer(name string, modelNames, operationIDs []string, opts *GenOpts) error {
+	generator, err := newAppGenerator(name, modelNames, operationIDs, opts)
+	if err != nil {
+		return err
+	}
+	return generator.Generate()
+}
+
+// GenerateSupport generates the supporting files for an API.
+// Passing nil makes the generator build its own codegen plan (standalone mode).
+func GenerateSupport(name string, modelNames, operationIDs []string, opts *GenOpts) error {
+	generator, err := newAppGenerator(name, modelNames, operationIDs, opts)
+	if err != nil {
+		return err
+	}
+	return generator.GenerateSupport(nil)
+}
+
+// GenerateMarkdown documentation for a swagger specification
+func GenerateMarkdown(output string, modelNames, operationIDs []string, opts *GenOpts) error {
+	// default the output file name when none (or the current dir) is given
+	if output == "." || output == "" {
+		output = "markdown.md"
+	}
+
+	if err := opts.EnsureDefaults(); err != nil {
+		return err
+	}
+	// configure the markdown generation section before building the generator
+	MarkdownSectionOpts(opts, output)
+
+	generator, err := newAppGenerator("", modelNames, operationIDs, opts)
+	if err != nil {
+		return err
+	}
+
+	return generator.GenerateMarkdown()
+}
+
+// newAppGenerator validates the generation options, analyzes the spec and
+// gathers the selected models and operations into a ready-to-run appGenerator.
+func newAppGenerator(name string, modelNames, operationIDs []string, opts *GenOpts) (*appGenerator, error) {
+	if err := opts.CheckOpts(); err != nil {
+		return nil, err
+	}
+
+	if err := opts.setTemplates(); err != nil {
+		return nil, err
+	}
+
+	specDoc, analyzed, err := opts.analyzeSpec()
+	if err != nil {
+		return nil, err
+	}
+
+	models, err := gatherModels(specDoc, modelNames)
+	if err != nil {
+		return nil, err
+	}
+
+	operations := gatherOperations(analyzed, operationIDs)
+
+	// an empty selection is an error unless operations are explicitly ignored
+	if len(operations) == 0 && !opts.IgnoreOperations {
+		return nil, errors.New("no operations were selected")
+	}
+
+	opts.Name = appNameOrDefault(specDoc, name, defaultServerName)
+	if opts.IncludeMain && opts.MainPackage == "" {
+		// default target for the generated main
+		opts.MainPackage = swag.ToCommandName(mainNameOrDefault(specDoc, name, defaultServerName) + "-server")
+	}
+
+	// all package paths/names are mangled through the language options
+	apiPackage := opts.LanguageOpts.ManglePackagePath(opts.APIPackage, defaultOperationsTarget)
+	return &appGenerator{
+		Name: opts.Name,
+		Receiver: "o",
+		SpecDoc: specDoc,
+		Analyzed: analyzed,
+		Models: models,
+		Operations: operations,
+		Target: opts.Target,
+		DumpData: opts.DumpData,
+		Package: opts.LanguageOpts.ManglePackageName(apiPackage, defaultOperationsTarget),
+		APIPackage: apiPackage,
+		ModelsPackage: opts.LanguageOpts.ManglePackagePath(opts.ModelPackage, defaultModelsTarget),
+		ServerPackage: opts.LanguageOpts.ManglePackagePath(opts.ServerPackage, defaultServerTarget),
+		ClientPackage: opts.LanguageOpts.ManglePackagePath(opts.ClientPackage, defaultClientTarget),
+		OperationsPackage: filepath.Join(opts.LanguageOpts.ManglePackagePath(opts.ServerPackage, defaultServerTarget), apiPackage),
+		Principal: opts.PrincipalAlias(),
+		DefaultScheme: opts.DefaultScheme,
+		DefaultProduces: opts.DefaultProduces,
+		DefaultConsumes: opts.DefaultConsumes,
+		GenOpts: opts,
+	}, nil
+}
+
+// appGenerator holds everything needed to run one application generation:
+// the analyzed spec, the selected models/operations and resolved package names.
+type appGenerator struct {
+	Name string
+	Receiver string
+	SpecDoc *loads.Document
+	Analyzed *analysis.Spec
+	Package string
+	APIPackage string
+	ModelsPackage string
+	ServerPackage string
+	ClientPackage string
+	OperationsPackage string
+	MainPackage string
+	Principal string
+	Models map[string]spec.Schema
+	Operations map[string]opRef
+	Target string
+	DumpData bool // when set, Generate dumps the plan instead of rendering
+	DefaultScheme string
+	DefaultProduces string
+	DefaultConsumes string
+	GenOpts *GenOpts
+}
+
+// Generate renders the full application: models first, then operations per
+// group, then the support files. With DumpData set, it only dumps the plan.
+func (a *appGenerator) Generate() error {
+	app, err := a.makeCodegenApp()
+	if err != nil {
+		return err
+	}
+
+	if a.DumpData {
+		return dumpData(app)
+	}
+
+	// NOTE: relative to previous implem with chan.
+	// IPC removed concurrent execution because of the FuncMap that is being shared
+	// templates are now lazy loaded so there is concurrent map access I can't guard
+	if a.GenOpts.IncludeModel {
+		log.Printf("rendering %d models", len(app.Models))
+		for _, md := range app.Models {
+			// work on a copy so flags set here don't leak into the shared plan
+			mod := md
+			mod.IncludeModel = true
+			mod.IncludeValidator = a.GenOpts.IncludeValidator
+			if err := a.GenOpts.renderDefinition(&mod); err != nil {
+				return err
+			}
+		}
+	}
+
+	if a.GenOpts.IncludeHandler {
+		log.Printf("rendering %d operation groups (tags)", app.OperationGroups.Len())
+		for _, g := range app.OperationGroups {
+			opg := g
+			log.Printf("rendering %d operations for %s", opg.Operations.Len(), opg.Name)
+			for _, p := range opg.Operations {
+				op := p
+				if err := a.GenOpts.renderOperation(&op); err != nil {
+					return err
+				}
+			}
+			// optional OperationGroups templates generation
+			if err := a.GenOpts.renderOperationGroup(&opg); err != nil {
+				return fmt.Errorf("error while rendering operation group: %v", err)
+			}
+		}
+	}
+
+	if a.GenOpts.IncludeSupport {
+		log.Printf("rendering support")
+		// reuse the already-built plan rather than rebuilding it
+		if err := a.GenerateSupport(&app); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// GenerateSupport renders the supporting files (server scaffolding, configure,
+// embedded spec, ...). When ap is nil a fresh codegen plan is built, which
+// allows calling GenerateSupport standalone.
+func (a *appGenerator) GenerateSupport(ap *GenApp) error {
+	app := ap
+	if ap == nil {
+		// allows for calling GenerateSupport standalone
+		ca, err := a.makeCodegenApp()
+		if err != nil {
+			return err
+		}
+		app = &ca
+	}
+
+	baseImport := a.GenOpts.LanguageOpts.baseImport(a.Target)
+	serverPath := path.Join(baseImport,
+		a.GenOpts.LanguageOpts.ManglePackagePath(a.ServerPackage, defaultServerTarget))
+
+	// register the server package import under a conflict-free alias
+	pkgAlias := deconflictPkg(importAlias(serverPath), renameServerPackage)
+	app.DefaultImports[pkgAlias] = serverPath
+	app.ServerPackageAlias = pkgAlias
+
+	// add client import for cli generation
+	clientPath := path.Join(baseImport,
+		a.GenOpts.LanguageOpts.ManglePackagePath(a.ClientPackage, defaultClientTarget))
+	clientPkgAlias := importAlias(clientPath)
+	app.DefaultImports[clientPkgAlias] = clientPath
+
+	return a.GenOpts.renderApplication(app)
+}
+
+// GenerateMarkdown builds the codegen plan and renders it through the
+// application templates (markdown templates are selected by the caller via opts).
+func (a *appGenerator) GenerateMarkdown() error {
+	app, err := a.makeCodegenApp()
+	if err != nil {
+		return err
+	}
+
+	return a.GenOpts.renderApplication(&app)
+}
+
+// makeSecuritySchemes resolves the security schemes actually required by the
+// analyzed spec against the spec's security definitions and returns them as a
+// codegen-ready collection.
+func (a *appGenerator) makeSecuritySchemes() GenSecuritySchemes {
+	// hoisted: RequiredSecuritySchemes was previously computed twice
+	// (once for the map sizing, once for the range)
+	required := a.Analyzed.RequiredSecuritySchemes()
+	requiredSecuritySchemes := make(map[string]spec.SecurityScheme, len(required))
+	for _, scheme := range required {
+		// skip schemes referenced by the spec but missing from securityDefinitions
+		if req, ok := a.SpecDoc.Spec().SecurityDefinitions[scheme]; ok && req != nil {
+			requiredSecuritySchemes[scheme] = *req
+		}
+	}
+	return gatherSecuritySchemes(requiredSecuritySchemes, a.Name, a.Principal, a.Receiver, a.GenOpts.PrincipalIsNullable())
+}
+
+// makeCodegenApp assembles the complete GenApp code-generation plan:
+// serializers and security schemes, then models, then operations, then
+// operations grouped by package, and finally the app-level metadata
+// (schemes, host, base path, embedded spec documents).
+func (a *appGenerator) makeCodegenApp() (GenApp, error) {
+	log.Println("building a plan for generation")
+
+	sw := a.SpecDoc.Spec()
+	receiver := a.Receiver
+
+	// NOTE(review): errors from consumes/produces resolution are discarded here
+	// — presumably defaults apply on failure; confirm against makeConsumes/makeProduces
+	consumes, _ := a.makeConsumes()
+	produces, _ := a.makeProduces()
+	security := a.makeSecuritySchemes()
+
+	log.Println("generation target", a.Target)
+
+	baseImport := a.GenOpts.LanguageOpts.baseImport(a.Target)
+	defaultImports := a.GenOpts.defaultImports()
+
+	imports := make(map[string]string, 50)
+	alias := deconflictPkg(a.GenOpts.LanguageOpts.ManglePackageName(a.OperationsPackage, defaultOperationsTarget), renameAPIPackage)
+	imports[alias] = path.Join(
+		baseImport,
+		a.GenOpts.LanguageOpts.ManglePackagePath(a.OperationsPackage, defaultOperationsTarget))
+
+	implAlias := ""
+	if a.GenOpts.ImplementationPackage != "" {
+		implAlias = deconflictPkg(a.GenOpts.LanguageOpts.ManglePackageName(a.GenOpts.ImplementationPackage, defaultImplementationTarget), renameImplementationPackage)
+		imports[implAlias] = a.GenOpts.ImplementationPackage
+	}
+
+	log.Printf("planning definitions (found: %d)", len(a.Models))
+
+	// plan model definitions; external models are planned but not emitted
+	genModels := make(GenDefinitions, 0, len(a.Models))
+	for mn, m := range a.Models {
+		model, err := makeGenDefinition(
+			mn,
+			a.ModelsPackage,
+			m,
+			a.SpecDoc,
+			a.GenOpts,
+		)
+		if err != nil {
+			return GenApp{}, fmt.Errorf("error in model %s while planning definitions: %v", mn, err)
+		}
+		if model != nil {
+			if !model.External {
+				genModels = append(genModels, *model)
+			}
+
+			// Copy model imports to operation imports
+			// TODO(fredbi): mangle model pkg aliases
+			for alias, pkg := range model.Imports {
+				target := a.GenOpts.LanguageOpts.ManglePackageName(alias, "")
+				imports[target] = pkg
+			}
+		}
+	}
+	sort.Sort(genModels)
+
+	log.Printf("planning operations (found: %d)", len(a.Operations))
+
+	// plan operations; each one is built via a codeGenOpBuilder
+	genOps := make(GenOperations, 0, len(a.Operations))
+	for operationName, opp := range a.Operations {
+		o := opp.Op
+		o.ID = operationName
+
+		bldr := codeGenOpBuilder{
+			ModelsPackage: a.ModelsPackage,
+			Principal: a.GenOpts.PrincipalAlias(),
+			Target: a.Target,
+			DefaultImports: defaultImports,
+			Imports: imports,
+			DefaultScheme: a.DefaultScheme,
+			Doc: a.SpecDoc,
+			Analyzed: a.Analyzed,
+			BasePath: a.SpecDoc.BasePath(),
+			GenOpts: a.GenOpts,
+			Name: operationName,
+			Operation: *o,
+			Method: opp.Method,
+			Path: opp.Path,
+			IncludeValidator: a.GenOpts.IncludeValidator,
+			APIPackage: a.APIPackage, // defaults to main operations package
+			DefaultProduces: a.DefaultProduces,
+			DefaultConsumes: a.DefaultConsumes,
+		}
+
+		tag, tags, ok := bldr.analyzeTags()
+		if !ok {
+			continue // operation filtered according to CLI params
+		}
+
+		bldr.Authed = len(a.Analyzed.SecurityRequirementsFor(o)) > 0
+		bldr.Security = a.Analyzed.SecurityRequirementsFor(o)
+		bldr.SecurityDefinitions = a.Analyzed.SecurityDefinitionsFor(o)
+		bldr.RootAPIPackage = a.GenOpts.LanguageOpts.ManglePackageName(a.ServerPackage, defaultServerTarget)
+
+		// skip operations whose tags do not intersect the CLI tag filter
+		st := o.Tags
+		if a.GenOpts != nil {
+			st = a.GenOpts.Tags
+		}
+		intersected := intersectTags(o.Tags, st)
+		if len(st) > 0 && len(intersected) == 0 {
+			continue
+		}
+
+		op, err := bldr.MakeOperation()
+		if err != nil {
+			return GenApp{}, err
+		}
+
+		op.ReceiverName = receiver
+		op.Tags = tags // ordered tags for this operation, possibly filtered by CLI params
+		genOps = append(genOps, op)
+
+		if !a.GenOpts.SkipTagPackages && tag != "" {
+			importPath := filepath.ToSlash(
+				path.Join(
+					baseImport,
+					a.GenOpts.LanguageOpts.ManglePackagePath(a.OperationsPackage, defaultOperationsTarget),
+					a.GenOpts.LanguageOpts.ManglePackageName(bldr.APIPackage, defaultOperationsTarget),
+				))
+			defaultImports[bldr.APIPackageAlias] = importPath
+		}
+	}
+	sort.Sort(genOps)
+
+	// group operations by package alias to build one operation group per package
+	opsGroupedByPackage := make(map[string]GenOperations, len(genOps))
+	for _, operation := range genOps {
+		opsGroupedByPackage[operation.PackageAlias] = append(opsGroupedByPackage[operation.PackageAlias], operation)
+	}
+
+	log.Printf("grouping operations into packages (packages: %d)", len(opsGroupedByPackage))
+
+	opGroups := make(GenOperationGroups, 0, len(opsGroupedByPackage))
+	for k, v := range opsGroupedByPackage {
+		log.Printf("operations for package packages %q (found: %d)", k, len(v))
+		sort.Sort(v)
+		// trim duplicate extra schemas within the same package
+		vv := make(GenOperations, 0, len(v))
+		seenExtraSchema := make(map[string]bool)
+		for _, op := range v {
+			uniqueExtraSchemas := make(GenSchemaList, 0, len(op.ExtraSchemas))
+			for _, xs := range op.ExtraSchemas {
+				if _, alreadyThere := seenExtraSchema[xs.Name]; !alreadyThere {
+					seenExtraSchema[xs.Name] = true
+					uniqueExtraSchemas = append(uniqueExtraSchemas, xs)
+				}
+			}
+			op.ExtraSchemas = uniqueExtraSchemas
+			vv = append(vv, op)
+		}
+		// group name falls back to the package alias when the group is empty
+		var pkg string
+		if len(vv) > 0 {
+			pkg = vv[0].Package
+		} else {
+			pkg = k
+		}
+
+		opGroup := GenOperationGroup{
+			GenCommon: GenCommon{
+				Copyright: a.GenOpts.Copyright,
+				TargetImportPath: baseImport,
+			},
+			Name: pkg,
+			PackageAlias: k,
+			Operations: vv,
+			DefaultImports: defaultImports,
+			Imports: imports,
+			RootPackage: a.APIPackage,
+			GenOpts: a.GenOpts,
+		}
+		opGroups = append(opGroups, opGroup)
+	}
+	sort.Sort(opGroups)
+
+	log.Println("planning meta data and facades")
+
+	// collect the union of schemes used across all operations
+	var collectedSchemes, extraSchemes []string
+	for _, op := range genOps {
+		collectedSchemes = concatUnique(collectedSchemes, op.Schemes)
+		extraSchemes = concatUnique(extraSchemes, op.ExtraSchemes)
+	}
+	sort.Strings(collectedSchemes)
+	sort.Strings(extraSchemes)
+
+	host := "localhost"
+	if sw.Host != "" {
+		host = sw.Host
+	}
+
+	basePath := "/"
+	if sw.BasePath != "" {
+		basePath = sw.BasePath
+	}
+
+	// embed both the original and the flattened spec as readable Go strings
+	jsonb, _ := json.MarshalIndent(a.SpecDoc.OrigSpec(), "", "  ")
+	flatjsonb, _ := json.MarshalIndent(a.SpecDoc.Spec(), "", "  ")
+
+	return GenApp{
+		GenCommon: GenCommon{
+			Copyright: a.GenOpts.Copyright,
+			TargetImportPath: baseImport,
+		},
+		APIPackage: a.GenOpts.LanguageOpts.ManglePackageName(a.ServerPackage, defaultServerTarget),
+		APIPackageAlias: alias,
+		ImplementationPackageAlias: implAlias,
+		Package: a.Package,
+		ReceiverName: receiver,
+		Name: a.Name,
+		Host: host,
+		BasePath: basePath,
+		Schemes: schemeOrDefault(collectedSchemes, a.DefaultScheme),
+		ExtraSchemes: extraSchemes,
+		ExternalDocs: trimExternalDoc(sw.ExternalDocs),
+		Tags: trimTags(sw.Tags),
+		Info: trimInfo(sw.Info),
+		Consumes: consumes,
+		Produces: produces,
+		DefaultConsumes: a.DefaultConsumes,
+		DefaultProduces: a.DefaultProduces,
+		DefaultImports: defaultImports,
+		Imports: imports,
+		SecurityDefinitions: security,
+		SecurityRequirements: securityRequirements(a.SpecDoc.Spec().Security), // top level securityRequirements
+		Models: genModels,
+		Operations: genOps,
+		OperationGroups: opGroups,
+		Principal: a.GenOpts.PrincipalAlias(),
+		SwaggerJSON: generateReadableSpec(jsonb),
+		FlatSwaggerJSON: generateReadableSpec(flatjsonb),
+		ExcludeSpec: a.GenOpts.ExcludeSpec,
+		GenOpts: a.GenOpts,
+
+		PrincipalIsNullable: a.GenOpts.PrincipalIsNullable(),
+	}, nil
+}
+
+// generateReadableSpec makes swagger json spec as a string instead of bytes
+// the only character that needs to be escaped is '`' symbol, since it cannot be escaped in the GO string
+// that is quoted as `string data`. The function doesn't care about the beginning or the ending of the
+// string it escapes since all data that needs to be escaped is always in the middle of the swagger spec.
+func generateReadableSpec(spec []byte) string {
+	// single-pass byte-level replacement: '`' is ASCII, so this is exactly
+	// equivalent to the previous rune-by-rune copy through a bytes.Buffer,
+	// without the per-rune writes.
+	return string(bytes.ReplaceAll(spec, []byte("`"), []byte("`+\"`\"+`")))
+}
+
+// trimExternalDoc returns a copy of the external documentation with a
+// BOM-trimmed description; nil input yields nil.
+func trimExternalDoc(in *spec.ExternalDocumentation) *spec.ExternalDocumentation {
+	if in == nil {
+		return nil
+	}
+
+	return &spec.ExternalDocumentation{
+		URL: in.URL,
+		Description: trimBOM(in.Description),
+	}
+}
+
+// trimInfo returns a copy of the spec info with BOM-trimmed text fields;
+// nil input yields nil.
+func trimInfo(in *spec.Info) *spec.Info {
+	if in == nil {
+		return nil
+	}
+
+	return &spec.Info{
+		InfoProps: spec.InfoProps{
+			Contact: in.Contact,
+			Title: trimBOM(in.Title),
+			Description: trimBOM(in.Description),
+			TermsOfService: trimBOM(in.TermsOfService),
+			License: in.License,
+			Version: in.Version,
+		},
+		VendorExtensible: in.VendorExtensible,
+	}
+}
+
+// trimTags returns a copy of the spec tags with BOM-trimmed descriptions and
+// trimmed external docs; nil input yields nil.
+func trimTags(in []spec.Tag) []spec.Tag {
+	if in == nil {
+		return nil
+	}
+
+	out := make([]spec.Tag, len(in))
+	for i := range in {
+		out[i] = spec.Tag{
+			TagProps: spec.TagProps{
+				Name: in[i].Name,
+				Description: trimBOM(in[i].Description),
+				ExternalDocs: trimExternalDoc(in[i].ExternalDocs),
+			},
+		}
+	}
+
+	return out
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/template_repo.go b/vendor/github.com/go-swagger/go-swagger/generator/template_repo.go
new file mode 100644
index 000000000..e78ae602a
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/template_repo.go
@@ -0,0 +1,855 @@
+package generator
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "math"
+ "os"
+ "path"
+ "path/filepath"
+ "reflect"
+ "strconv"
+ "strings"
+ "sync"
+ "text/template"
+ "text/template/parse"
+ "unicode"
+
+ "log"
+
+ "github.com/Masterminds/sprig/v3"
+ "github.com/go-openapi/inflect"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/swag"
+ "github.com/kr/pretty"
+)
+
var (
	// assets maps canonical template names to the embedded raw template contents
	assets map[string][]byte
	// protectedTemplates lists template names that user templates may not override
	protectedTemplates map[string]bool

	// FuncMapFunc yields a map with all functions for templates
	FuncMapFunc func(*LanguageOpts) template.FuncMap

	// templates is the package-level default template repository,
	// initialized by initTemplateRepo
	templates *Repository

	// docFormat maps swagger formats to human-readable documentation labels
	docFormat map[string]string
)
+
// initTemplateRepo initializes the package-level template machinery:
// the default function map, the ToGoName prefixing rule, the embedded
// assets, the protected template names and the default repository.
func initTemplateRepo() {
	FuncMapFunc = DefaultFuncMap

	// this makes the ToGoName func behave with the special
	// prefixing rule implemented by prefixForName
	swag.GoNamePrefixFunc = prefixForName

	assets = defaultAssets()
	protectedTemplates = defaultProtectedTemplates()
	templates = NewRepository(FuncMapFunc(DefaultLanguageFunc()))

	// human-readable labels used when documenting string formats
	docFormat = map[string]string{
		"binary": "binary (byte stream)",
		"byte":   "byte (base64 string)",
	}
}
+
// DefaultFuncMap yields a map with default functions for use in the templates.
// These are available in every template.
//
// The sprig text function map is taken as the base, then extended (and
// possibly overridden) with go-swagger specific helpers, most of which
// delegate to the language options (lang) for name mangling.
func DefaultFuncMap(lang *LanguageOpts) template.FuncMap {
	f := sprig.TxtFuncMap()
	extra := template.FuncMap{
		// naming helpers
		"pascalize": pascalize,
		"camelize":  swag.ToJSONName,
		"varname":   lang.MangleVarName,
		"humanize":  swag.ToHumanNameLower,
		"snakize":   lang.MangleFileName,
		"toPackagePath": func(name string) string {
			return filepath.FromSlash(lang.ManglePackagePath(name, ""))
		},
		"toPackage": func(name string) string {
			return lang.ManglePackagePath(name, "")
		},
		"toPackageName": func(name string) string {
			return lang.ManglePackageName(name, "")
		},
		"dasherize":          swag.ToCommandName,
		"pluralizeFirstWord": pluralizeFirstWord,
		// serialization helpers
		"json":       asJSON,
		"prettyjson": asPrettyJSON,
		// scheme inspection helpers
		"hasInsecure": func(arg []string) bool {
			return swag.ContainsStringsCI(arg, "http") || swag.ContainsStringsCI(arg, "ws")
		},
		"hasSecure": func(arg []string) bool {
			return swag.ContainsStringsCI(arg, "https") || swag.ContainsStringsCI(arg, "wss")
		},
		"dropPackage":      dropPackage,
		"containsPkgStr":   containsPkgStr,
		"contains":         swag.ContainsStrings,
		"padSurround":      padSurround,
		"joinFilePath":     filepath.Join,
		"joinPath":         path.Join,
		"comment":          padComment,
		"blockcomment":     blockComment,
		"inspect":          pretty.Sprint,
		"cleanPath":        path.Clean,
		"mediaTypeName":    mediaMime,
		"arrayInitializer": lang.arrayInitializer,
		"hasPrefix":        strings.HasPrefix,
		"stringContains":   strings.Contains,
		"imports":          lang.imports,
		"dict":             dict,
		"isInteger":        isInteger,
		// escapeBackticks makes arg safe to embed in a backquoted Go string
		"escapeBackticks": func(arg string) string {
			return strings.ReplaceAll(arg, "`", "`+\"`\"+`")
		},
		// documentation helpers resolving swagger types to readable labels
		"paramDocType": func(param GenParameter) string {
			return resolvedDocType(param.SwaggerType, param.SwaggerFormat, param.Child)
		},
		"headerDocType": func(header GenHeader) string {
			return resolvedDocType(header.SwaggerType, header.SwaggerFormat, header.Child)
		},
		// schemaDocType accepts either a (pointer to) GenSchema or GenDefinition
		"schemaDocType": func(in interface{}) string {
			switch schema := in.(type) {
			case GenSchema:
				return resolvedDocSchemaType(schema.SwaggerType, schema.SwaggerFormat, schema.Items)
			case *GenSchema:
				if schema == nil {
					return ""
				}
				return resolvedDocSchemaType(schema.SwaggerType, schema.SwaggerFormat, schema.Items)
			case GenDefinition:
				return resolvedDocSchemaType(schema.SwaggerType, schema.SwaggerFormat, schema.Items)
			case *GenDefinition:
				if schema == nil {
					return ""
				}
				return resolvedDocSchemaType(schema.SwaggerType, schema.SwaggerFormat, schema.Items)
			default:
				panic("dev error: schemaDocType should be called with GenSchema or GenDefinition")
			}
		},
		"schemaDocMapType": func(schema GenSchema) string {
			return resolvedDocElemType("object", schema.SwaggerFormat, &schema.resolvedType)
		},
		"docCollectionFormat": resolvedDocCollectionFormat,
		"trimSpace":           strings.TrimSpace,
		"httpStatus":          httpStatus,
		"cleanupEnumVariant":  cleanupEnumVariant,
		"gt0":                 gt0,
	}

	// extras take precedence over sprig functions of the same name
	for k, v := range extra {
		f[k] = v
	}

	return f
}
+
// defaultAssets maps canonical template names to the embedded template
// contents shipped with go-swagger. MustAsset panics on a missing asset,
// which can only happen on a broken build.
func defaultAssets() map[string][]byte {
	return map[string][]byte{
		// schema validation templates
		"validation/primitive.gotmpl":    MustAsset("templates/validation/primitive.gotmpl"),
		"validation/customformat.gotmpl": MustAsset("templates/validation/customformat.gotmpl"),
		"validation/structfield.gotmpl":  MustAsset("templates/validation/structfield.gotmpl"),
		"structfield.gotmpl":             MustAsset("templates/structfield.gotmpl"),
		"schemavalidator.gotmpl":         MustAsset("templates/schemavalidator.gotmpl"),
		"schemapolymorphic.gotmpl":       MustAsset("templates/schemapolymorphic.gotmpl"),
		"schemaembedded.gotmpl":          MustAsset("templates/schemaembedded.gotmpl"),
		"validation/minimum.gotmpl":      MustAsset("templates/validation/minimum.gotmpl"),
		"validation/maximum.gotmpl":      MustAsset("templates/validation/maximum.gotmpl"),
		"validation/multipleOf.gotmpl":   MustAsset("templates/validation/multipleOf.gotmpl"),

		// schema serialization templates
		"additionalpropertiesserializer.gotmpl": MustAsset("templates/serializers/additionalpropertiesserializer.gotmpl"),
		"aliasedserializer.gotmpl":              MustAsset("templates/serializers/aliasedserializer.gotmpl"),
		"allofserializer.gotmpl":                MustAsset("templates/serializers/allofserializer.gotmpl"),
		"basetypeserializer.gotmpl":             MustAsset("templates/serializers/basetypeserializer.gotmpl"),
		"marshalbinaryserializer.gotmpl":        MustAsset("templates/serializers/marshalbinaryserializer.gotmpl"),
		"schemaserializer.gotmpl":               MustAsset("templates/serializers/schemaserializer.gotmpl"),
		"subtypeserializer.gotmpl":              MustAsset("templates/serializers/subtypeserializer.gotmpl"),
		"tupleserializer.gotmpl":                MustAsset("templates/serializers/tupleserializer.gotmpl"),

		// schema generation template
		"docstring.gotmpl":  MustAsset("templates/docstring.gotmpl"),
		"schematype.gotmpl": MustAsset("templates/schematype.gotmpl"),
		"schemabody.gotmpl": MustAsset("templates/schemabody.gotmpl"),
		"schema.gotmpl":     MustAsset("templates/schema.gotmpl"),
		"model.gotmpl":      MustAsset("templates/model.gotmpl"),
		"header.gotmpl":     MustAsset("templates/header.gotmpl"),

		// simple schema generation helpers templates
		"simpleschema/defaultsvar.gotmpl":  MustAsset("templates/simpleschema/defaultsvar.gotmpl"),
		"simpleschema/defaultsinit.gotmpl": MustAsset("templates/simpleschema/defaultsinit.gotmpl"),

		"swagger_json_embed.gotmpl": MustAsset("templates/swagger_json_embed.gotmpl"),

		// server templates
		"server/parameter.gotmpl":        MustAsset("templates/server/parameter.gotmpl"),
		"server/urlbuilder.gotmpl":       MustAsset("templates/server/urlbuilder.gotmpl"),
		"server/responses.gotmpl":        MustAsset("templates/server/responses.gotmpl"),
		"server/operation.gotmpl":        MustAsset("templates/server/operation.gotmpl"),
		"server/builder.gotmpl":          MustAsset("templates/server/builder.gotmpl"),
		"server/server.gotmpl":           MustAsset("templates/server/server.gotmpl"),
		"server/configureapi.gotmpl":     MustAsset("templates/server/configureapi.gotmpl"),
		"server/autoconfigureapi.gotmpl": MustAsset("templates/server/autoconfigureapi.gotmpl"),
		"server/main.gotmpl":             MustAsset("templates/server/main.gotmpl"),
		"server/doc.gotmpl":              MustAsset("templates/server/doc.gotmpl"),

		// client templates
		"client/parameter.gotmpl": MustAsset("templates/client/parameter.gotmpl"),
		"client/response.gotmpl":  MustAsset("templates/client/response.gotmpl"),
		"client/client.gotmpl":    MustAsset("templates/client/client.gotmpl"),
		"client/facade.gotmpl":    MustAsset("templates/client/facade.gotmpl"),

		"markdown/docs.gotmpl": MustAsset("templates/markdown/docs.gotmpl"),

		// cli templates
		"cli/cli.gotmpl":          MustAsset("templates/cli/cli.gotmpl"),
		"cli/main.gotmpl":         MustAsset("templates/cli/main.gotmpl"),
		"cli/modelcli.gotmpl":     MustAsset("templates/cli/modelcli.gotmpl"),
		"cli/operation.gotmpl":    MustAsset("templates/cli/operation.gotmpl"),
		"cli/registerflag.gotmpl": MustAsset("templates/cli/registerflag.gotmpl"),
		"cli/retrieveflag.gotmpl": MustAsset("templates/cli/retrieveflag.gotmpl"),
		"cli/schema.gotmpl":       MustAsset("templates/cli/schema.gotmpl"),
		"cli/completion.gotmpl":   MustAsset("templates/cli/completion.gotmpl"),
	}
}
+
// defaultProtectedTemplates lists the names of templates that user-provided
// template files may not redefine, unless overrides are explicitly allowed
// (see Repository.SetAllowOverride and addFile).
func defaultProtectedTemplates() map[string]bool {
	return map[string]bool{
		"dereffedSchemaType":          true,
		"docstring":                   true,
		"header":                      true,
		"mapvalidator":                true,
		"model":                       true,
		"modelvalidator":              true,
		"objectvalidator":             true,
		"primitivefieldvalidator":     true,
		"privstructfield":             true,
		"privtuplefield":              true,
		"propertyValidationDocString": true,
		"propertyvalidator":           true,
		"schema":                      true,
		"schemaBody":                  true,
		"schemaType":                  true,
		"schemabody":                  true,
		"schematype":                  true,
		"schemavalidator":             true,
		"serverDoc":                   true,
		"slicevalidator":              true,
		"structfield":                 true,
		"structfieldIface":            true,
		"subTypeBody":                 true,
		"swaggerJsonEmbed":            true,
		"tuplefield":                  true,
		"tuplefieldIface":             true,
		"typeSchemaType":              true,
		"simpleschemaDefaultsvar":     true,
		"simpleschemaDefaultsinit":    true,

		// validation helpers
		"validationCustomformat": true,
		"validationPrimitive":    true,
		"validationStructfield":  true,
		"withBaseTypeBody":       true,
		"withoutBaseTypeBody":    true,
		"validationMinimum":      true,
		"validationMaximum":      true,
		"validationMultipleOf":   true,

		// all serializers
		"additionalPropertiesSerializer": true,
		"tupleSerializer":                true,
		"schemaSerializer":               true,
		"hasDiscriminatedSerializer":     true,
		"discriminatedSerializer":        true,
	}
}
+
// AddFile adds a file to the default package-level repository. It will create
// a new template based on the filename.
// It trims the .gotmpl from the end and converts the name using swag.ToJSONName.
// This will strip directory separators and Camelcase the next letter.
// e.g validation/primitive.gotmpl will become validationPrimitive
//
// If the file contains a definition for a template that is protected the whole file will not be added
func AddFile(name, data string) error {
	return templates.addFile(name, data, false)
}
+
+// NewRepository creates a new template repository with the provided functions defined
+func NewRepository(funcs template.FuncMap) *Repository {
+ repo := Repository{
+ files: make(map[string]string),
+ templates: make(map[string]*template.Template),
+ funcs: funcs,
+ }
+
+ if repo.funcs == nil {
+ repo.funcs = make(template.FuncMap)
+ }
+
+ return &repo
+}
+
// Repository is the repository for the generator templates
type Repository struct {
	// files maps a template name to the file it was defined in (used for reporting)
	files map[string]string
	// templates caches every parsed template by name
	templates map[string]*template.Template
	// funcs is the function map made available to all templates in this repository
	funcs template.FuncMap
	// allowOverride permits redefining protected templates when set
	allowOverride bool
	// mux guards the maps while cloning; NOTE(review): addFile mutates the maps
	// without taking this lock — confirm callers serialize writes
	mux sync.Mutex
}
+
// ShallowClone a repository.
//
// Clones the maps of files and templates, so as to be able to use
// the cloned repo concurrently.
//
// The clone is shallow: the funcs map and the template pointers themselves
// are shared with the original; only the two lookup maps are copied.
func (t *Repository) ShallowClone() *Repository {
	clone := &Repository{
		files:         make(map[string]string, len(t.files)),
		templates:     make(map[string]*template.Template, len(t.templates)),
		funcs:         t.funcs,
		allowOverride: t.allowOverride,
	}

	// hold the lock while copying so the maps are not mutated mid-copy
	t.mux.Lock()
	defer t.mux.Unlock()

	for k, file := range t.files {
		clone.files[k] = file
	}
	for k, tpl := range t.templates {
		clone.templates[k] = tpl
	}
	return clone
}
+
// LoadDefaults will load the embedded templates.
//
// Protected-template checks are bypassed (allowOverride=true) since these are
// the shipped defaults; any load failure aborts the program via log.Fatal.
func (t *Repository) LoadDefaults() {

	for name, asset := range assets {
		if err := t.addFile(name, string(asset), true); err != nil {
			log.Fatal(err)
		}
	}
}
+
+// LoadDir will walk the specified path and add each .gotmpl file it finds to the repository
+func (t *Repository) LoadDir(templatePath string) error {
+ err := filepath.Walk(templatePath, func(path string, info os.FileInfo, err error) error {
+
+ if strings.HasSuffix(path, ".gotmpl") {
+ if assetName, e := filepath.Rel(templatePath, path); e == nil {
+ if data, e := os.ReadFile(path); e == nil {
+ if ee := t.AddFile(assetName, string(data)); ee != nil {
+ return fmt.Errorf("could not add template: %v", ee)
+ }
+ }
+ // Non-readable files are skipped
+ }
+ }
+ if err != nil {
+ return err
+ }
+ // Non-template files are skipped
+ return nil
+ })
+ if err != nil {
+ return fmt.Errorf("could not complete template processing in directory \"%s\": %v", templatePath, err)
+ }
+ return nil
+}
+
// LoadContrib loads template from contrib directory.
//
// It scans the embedded assets under "templates/contrib/<name>" and registers
// every .gotmpl found there, with protected-template checks bypassed.
// An error is returned when no file could be added for the given name.
func (t *Repository) LoadContrib(name string) error {
	log.Printf("loading contrib %s", name)
	const pathPrefix = "templates/contrib/"
	basePath := pathPrefix + name
	filesAdded := 0
	for _, aname := range AssetNames() {
		if !strings.HasSuffix(aname, ".gotmpl") {
			continue
		}
		if strings.HasPrefix(aname, basePath) {
			// register the asset under its path relative to the contrib base
			target := aname[len(basePath)+1:]
			err := t.addFile(target, string(MustAsset(aname)), true)
			if err != nil {
				return err
			}
			log.Printf("added contributed template %s from %s", target, aname)
			filesAdded++
		}
	}
	if filesAdded == 0 {
		return fmt.Errorf("no files added from template: %s", name)
	}
	return nil
}
+
+func (t *Repository) addFile(name, data string, allowOverride bool) error {
+ fileName := name
+ name = swag.ToJSONName(strings.TrimSuffix(name, ".gotmpl"))
+
+ templ, err := template.New(name).Funcs(t.funcs).Parse(data)
+
+ if err != nil {
+ return fmt.Errorf("failed to load template %s: %v", name, err)
+ }
+
+ // check if any protected templates are defined
+ if !allowOverride && !t.allowOverride {
+ for _, template := range templ.Templates() {
+ if protectedTemplates[template.Name()] {
+ return fmt.Errorf("cannot overwrite protected template %s", template.Name())
+ }
+ }
+ }
+
+ // Add each defined template into the cache
+ for _, template := range templ.Templates() {
+
+ t.files[template.Name()] = fileName
+ t.templates[template.Name()] = template.Lookup(template.Name())
+ }
+
+ return nil
+}
+
+// MustGet a template by name, panics when fails
+func (t *Repository) MustGet(name string) *template.Template {
+ tpl, err := t.Get(name)
+ if err != nil {
+ panic(err)
+ }
+ return tpl
+}
+
// AddFile adds a file to the repository. It will create a new template based on the filename.
// It trims the .gotmpl from the end and converts the name using swag.ToJSONName. This will strip
// directory separators and Camelcase the next letter.
// e.g validation/primitive.gotmpl will become validationPrimitive
//
// If the file contains a definition for a template that is protected the whole file will not be added
// (this delegates to addFile with per-call overrides disallowed).
func (t *Repository) AddFile(name, data string) error {
	return t.addFile(name, data, false)
}
+
// SetAllowOverride allows setting allowOverride after the Repository was initialized.
// When true, subsequently added files may redefine protected templates.
func (t *Repository) SetAllowOverride(value bool) {
	t.allowOverride = value
}
+
+func findDependencies(n parse.Node) []string {
+
+ var deps []string
+ depMap := make(map[string]bool)
+
+ if n == nil {
+ return deps
+ }
+
+ switch node := n.(type) {
+ case *parse.ListNode:
+ if node != nil && node.Nodes != nil {
+ for _, nn := range node.Nodes {
+ for _, dep := range findDependencies(nn) {
+ depMap[dep] = true
+ }
+ }
+ }
+ case *parse.IfNode:
+ for _, dep := range findDependencies(node.BranchNode.List) {
+ depMap[dep] = true
+ }
+ for _, dep := range findDependencies(node.BranchNode.ElseList) {
+ depMap[dep] = true
+ }
+
+ case *parse.RangeNode:
+ for _, dep := range findDependencies(node.BranchNode.List) {
+ depMap[dep] = true
+ }
+ for _, dep := range findDependencies(node.BranchNode.ElseList) {
+ depMap[dep] = true
+ }
+
+ case *parse.WithNode:
+ for _, dep := range findDependencies(node.BranchNode.List) {
+ depMap[dep] = true
+ }
+ for _, dep := range findDependencies(node.BranchNode.ElseList) {
+ depMap[dep] = true
+ }
+
+ case *parse.TemplateNode:
+ depMap[node.Name] = true
+ }
+
+ for dep := range depMap {
+ deps = append(deps, dep)
+ }
+
+ return deps
+
+}
+
+func (t *Repository) flattenDependencies(templ *template.Template, dependencies map[string]bool) map[string]bool {
+ if dependencies == nil {
+ dependencies = make(map[string]bool)
+ }
+
+ deps := findDependencies(templ.Tree.Root)
+
+ for _, d := range deps {
+ if _, found := dependencies[d]; !found {
+
+ dependencies[d] = true
+
+ if tt := t.templates[d]; tt != nil {
+ dependencies = t.flattenDependencies(tt, dependencies)
+ }
+ }
+
+ dependencies[d] = true
+
+ }
+
+ return dependencies
+
+}
+
// addDependencies resolves all templates referenced by templ (transitively)
// and grafts any that are missing from templ's parse tree, taking them from
// the repository cache. It returns the template looked up again under its
// original name, or an error when a referenced template is unknown.
func (t *Repository) addDependencies(templ *template.Template) (*template.Template, error) {

	name := templ.Name()

	// the full transitive closure of template names referenced by templ
	deps := t.flattenDependencies(templ, nil)

	for dep := range deps {

		if dep == "" {
			continue
		}

		tt := templ.Lookup(dep)

		// Check if we have it
		if tt == nil {
			tt = t.templates[dep]

			// Still don't have it, return an error
			if tt == nil {
				return templ, fmt.Errorf("could not find template %s", dep)
			}
			var err error

			// Add it to the parse tree
			templ, err = templ.AddParseTree(dep, tt.Tree)

			if err != nil {
				return templ, fmt.Errorf("dependency error: %v", err)
			}

		}
	}
	return templ.Lookup(name), nil
}
+
+// Get will return the named template from the repository, ensuring that all dependent templates are loaded.
+// It will return an error if a dependent template is not defined in the repository.
+func (t *Repository) Get(name string) (*template.Template, error) {
+ templ, found := t.templates[name]
+
+ if !found {
+ return templ, fmt.Errorf("template doesn't exist %s", name)
+ }
+
+ return t.addDependencies(templ)
+}
+
// DumpTemplates prints out a dump of all the defined templates, where they are defined and what their dependencies are.
// The report is markdown-formatted and emitted through the standard logger.
func (t *Repository) DumpTemplates() {
	buf := bytes.NewBuffer(nil)
	fmt.Fprintln(buf, "\n# Templates")
	for name, templ := range t.templates {
		fmt.Fprintf(buf, "## %s\n", name)
		fmt.Fprintf(buf, "Defined in `%s`\n", t.files[name])

		// only direct dependencies are listed here, not the transitive closure
		if deps := findDependencies(templ.Tree.Root); len(deps) > 0 {

			fmt.Fprintf(buf, "####requires \n - %v\n\n\n", strings.Join(deps, "\n - "))
		}
		fmt.Fprintln(buf, "\n---")
	}
	log.Println(buf.String())
}
+
+// FuncMap functions
+
// asJSON renders a value as a compact JSON string, propagating any
// marshaling error to the template engine.
func asJSON(data interface{}) (string, error) {
	encoded, err := json.Marshal(data)
	if err != nil {
		return "", err
	}
	return string(encoded), nil
}
+
// asPrettyJSON renders a value as indented JSON (two-space indent),
// propagating any marshaling error to the template engine.
func asPrettyJSON(data interface{}) (string, error) {
	encoded, err := json.MarshalIndent(data, "", "  ")
	if err != nil {
		return "", err
	}
	return string(encoded), nil
}
+
+func pluralizeFirstWord(arg string) string {
+ sentence := strings.Split(arg, " ")
+ if len(sentence) == 1 {
+ return inflect.Pluralize(arg)
+ }
+
+ return inflect.Pluralize(sentence[0]) + " " + strings.Join(sentence[1:], " ")
+}
+
// dropPackage strips any package qualifier from a dotted type name, keeping
// only the segment after the last dot (e.g. "model.MyType" -> "MyType").
func dropPackage(str string) string {
	if idx := strings.LastIndex(str, "."); idx >= 0 {
		return str[idx+1:]
	}
	return str
}
+
+// return true if the GoType str contains pkg. For example "model.MyType" -> true, "MyType" -> false
+func containsPkgStr(str string) bool {
+ dropped := dropPackage(str)
+ return !(dropped == str)
+}
+
// padSurround places entry at position i of a comma-joined list of total
// length ln, filling every other slot with padWith
// (e.g. padSurround("x", "p", 2, 5) -> "p,p,x,p,p").
func padSurround(entry, padWith string, i, ln int) string {
	var parts []string
	for j := 0; j < i; j++ {
		parts = append(parts, padWith)
	}
	parts = append(parts, entry)
	for j := i + 1; j < ln; j++ {
		parts = append(parts, padWith)
	}
	return strings.Join(parts, ",")
}
+
// padComment continues a "//" comment across the lines of str: every newline
// is followed by "//" plus a pad. The pad is the concatenation of pads and
// defaults to a single space, so multi-line comments stay indented.
func padComment(str string, pads ...string) string {
	pad := " "
	if len(pads) > 0 {
		pad = strings.Join(pads, "")
	}
	return strings.ReplaceAll(str, "\n", "\n//"+pad)
}
+
// blockComment neutralizes block-comment terminators ("*/") inside str so the
// text can be embedded safely in a /* ... */ comment.
func blockComment(str string) string {
	return strings.Join(strings.Split(str, "*/"), "[*]/")
}
+
// pascalize turns a name into a Go-style exported identifier.
// Single special runes that swag.ToGoName handles differently are routed
// through prefixForName instead; everything else goes through ToGoName twice
// so that spaces introduced by the first pass are removed by the second.
func pascalize(arg string) string {
	runes := []rune(arg)
	switch len(runes) {
	case 0:
		return "Empty"
	case 1: // handle special case when we have a single rune that is not handled by swag.ToGoName
		switch runes[0] {
		case '+', '-', '#', '_', '*', '/', '=': // those cases are handled differently than swag utility
			return prefixForName(arg)
		}
	}
	return swag.ToGoName(swag.ToGoName(arg)) // want to remove spaces
}
+
// prefixForName yields a replacement prefix for names starting with a
// non-letter rune, so the generated Go identifier remains valid.
// It returns "" for an empty name or one that already starts with a letter,
// a dedicated word for a handful of special symbols, and "Nr" otherwise.
func prefixForName(arg string) string {
	// guard the empty string first: indexing []rune(arg)[0] would panic
	if arg == "" {
		return ""
	}
	first := []rune(arg)[0]
	if unicode.IsLetter(first) {
		return ""
	}
	switch first {
	case '+':
		return "Plus"
	case '-':
		return "Minus"
	case '#':
		return "HashTag"
	case '*':
		return "Asterisk"
	case '/':
		return "ForwardSlash"
	case '=':
		return "EqualSign"
		// other cases ($,@ etc..) handled by swag.ToGoName
	}
	return "Nr"
}
+
// specialCharReplacements maps runes that may not appear in a generated
// identifier to readable placeholder words.
var specialCharReplacements = map[rune]string{
	'.': "-Dot-",
	'+': "-Plus-",
	'-': "-Dash-",
	'#': "-Hashtag-",
}

// replaceSpecialChar substitutes a readable placeholder for special runes;
// any other rune is returned unchanged.
func replaceSpecialChar(in rune) string {
	if replacement, ok := specialCharReplacements[in]; ok {
		return replacement
	}
	return string(in)
}
+
+func cleanupEnumVariant(in string) string {
+ replaced := ""
+ for _, char := range in {
+ replaced += replaceSpecialChar(char)
+ }
+ return replaced
+}
+
// dict builds a map from an alternating list of keys and values, for passing
// several named arguments to a template. It errors on an odd number of
// arguments or on a key that is not a string.
func dict(values ...interface{}) (map[string]interface{}, error) {
	if len(values)%2 != 0 {
		return nil, fmt.Errorf("expected even number of arguments, got %d", len(values))
	}
	result := make(map[string]interface{}, len(values)/2)
	for i := 0; i+1 < len(values); i += 2 {
		key, ok := values[i].(string)
		if !ok {
			return nil, fmt.Errorf("expected string key, got %+v", values[i])
		}
		result[key] = values[i+1]
	}
	return result, nil
}
+
// isInteger determines if a value may be represented by an integer:
// any integer type (or non-nil pointer to one), a float with no fractional
// part (or non-nil pointer to one), or a string that parses as a base-10
// int64 (or non-nil pointer to one). Anything else is not an integer.
func isInteger(arg interface{}) bool {
	switch val := arg.(type) {
	case int8, int16, int32, int, int64, uint8, uint16, uint32, uint, uint64:
		return true
	case *int8, *int16, *int32, *int, *int64, *uint8, *uint16, *uint32, *uint, *uint64:
		// a typed nil pointer does not count as an integer
		v := reflect.ValueOf(arg)
		return !v.IsNil()
	case float64:
		return math.Round(val) == val
	case *float64:
		return val != nil && math.Round(*val) == *val
	case float32:
		return math.Round(float64(val)) == float64(val)
	case *float32:
		return val != nil && math.Round(float64(*val)) == float64(*val)
	case string:
		_, err := strconv.ParseInt(val, 10, 64)
		return err == nil
	case *string:
		if val == nil {
			return false
		}
		_, err := strconv.ParseInt(*val, 10, 64)
		return err == nil
	default:
		return false
	}
}
+
// resolvedDocCollectionFormat joins the collection format of an item with the
// formats of its nested child items, separated by "|". Whenever a level has
// children but no explicit format, "csv" is assumed; a leaf (child == nil)
// returns its format as-is.
func resolvedDocCollectionFormat(cf string, child *GenItems) string {
	if child == nil {
		return cf
	}
	ccf := cf
	if ccf == "" {
		ccf = "csv"
	}
	// recurse into the nested items to build the composite format
	rcf := resolvedDocCollectionFormat(child.CollectionFormat, child.Child)
	if rcf == "" {
		return ccf
	}
	return ccf + "|" + rcf
}
+
// resolvedDocType yields a human-readable label for a swagger type/format
// pair: arrays recurse into their child items ("[]..."), known formats are
// replaced by the docFormat label, other formats are rendered as
// "<format> (formatted <type>)", and plain types are returned as-is.
func resolvedDocType(tn, ft string, child *GenItems) string {
	if tn == "array" {
		if child == nil {
			// no item description available
			return "[]any"
		}
		return "[]" + resolvedDocType(child.SwaggerType, child.SwaggerFormat, child.Child)
	}

	if ft != "" {
		if doc, ok := docFormat[ft]; ok {
			return doc
		}
		return fmt.Sprintf("%s (formatted %s)", ft, tn)
	}

	return tn
}
+
// resolvedDocSchemaType yields a human-readable label for a schema's
// swagger type/format pair: arrays recurse into their items, objects render
// as "map of ..." (or the schema's GoType when not a map), known formats use
// the docFormat label, and other formats render as
// "<format> (formatted <type>)".
func resolvedDocSchemaType(tn, ft string, child *GenSchema) string {
	if tn == "array" {
		if child == nil {
			return "[]any"
		}
		return "[]" + resolvedDocSchemaType(child.SwaggerType, child.SwaggerFormat, child.Items)
	}

	if tn == "object" {
		if child == nil || child.ElemType == nil {
			// no element information: fall back to a generic map
			return "map of any"
		}
		if child.IsMap {
			return "map of " + resolvedDocElemType(child.SwaggerType, child.SwaggerFormat, &child.resolvedType)
		}

		// a non-map object is documented by its generated Go type
		return child.GoType
	}

	if ft != "" {
		if doc, ok := docFormat[ft]; ok {
			return doc
		}
		return fmt.Sprintf("%s (formatted %s)", ft, tn)
	}

	return tn
}
+
// resolvedDocElemType yields a human-readable label for a resolved type,
// recursing through map and array element types. A nil schema yields "".
//
// NOTE(review): the IsMap/IsArray branches dereference schema.ElemType
// without a nil check — this assumes ElemType is always set when either flag
// is; confirm that invariant upstream.
func resolvedDocElemType(tn, ft string, schema *resolvedType) string {
	if schema == nil {
		return ""
	}
	if schema.IsMap {
		return "map of " + resolvedDocElemType(schema.ElemType.SwaggerType, schema.ElemType.SwaggerFormat, schema.ElemType)
	}

	if schema.IsArray {
		return "[]" + resolvedDocElemType(schema.ElemType.SwaggerType, schema.ElemType.SwaggerFormat, schema.ElemType)
	}

	if ft != "" {
		if doc, ok := docFormat[ft]; ok {
			return doc
		}
		return fmt.Sprintf("%s (formatted %s)", ft, tn)
	}

	return tn
}
+
+func httpStatus(code int) string {
+ if name, ok := runtime.Statuses[code]; ok {
+ return name
+ }
+ // non-standard codes deserve some name
+ return fmt.Sprintf("Status %d", code)
+}
+
// gt0 reports whether the *int64 points at a value strictly greater than 0.
// NOTE: plain {{ gt .MinProperties 0 }} just refuses to work normally
// with a pointer, hence this helper.
func gt0(in *int64) bool {
	if in == nil {
		return false
	}
	return *in > 0
}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/cli.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/cli.gotmpl
new file mode 100644
index 000000000..3d88c5beb
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/cli.gotmpl
@@ -0,0 +1,242 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .GenOpts.CliPackage }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+
+ "github.com/spf13/cobra"
+ "github.com/spf13/viper"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/swag"
+ httptransport "github.com/go-openapi/runtime/client"
+ homedir "github.com/mitchellh/go-homedir"
+)
+
+// debug flag indicating that cli should output debug logs
+var debug bool
+// config file location
+var configFile string
+// dry run flag
+var dryRun bool
+
+// name of the executable
+var exeName string = filepath.Base(os.Args[0])
+
+// logDebugf writes debug log to stdout
+func logDebugf(format string, v ...interface{}) {
+ if !debug{
+ return
+ }
+ log.Printf(format, v...)
+}
+
+{{/*TODO: make this a swagger cli option*/}}
+// depth of recursion to construct model flags
+var maxDepth int = 5
+
+// makeClient constructs a client object
+func makeClient(cmd *cobra.Command, args []string) (*client.{{ pascalize .Name }}, error) {
+ hostname := viper.GetString("hostname")
+ viper.SetDefault("base_path", client.DefaultBasePath)
+ basePath := viper.GetString("base_path")
+ scheme := viper.GetString("scheme")
+
+ r := httptransport.New(hostname, basePath, []string{scheme})
+ r.SetDebug(debug)
+
+ {{- /* user might define custom mediatype xxx/json and there is no registered ones to handle. */}}
+ // set custom producer and consumer to use the default ones
+ {{ range .Consumes }}
+ {{ range .AllSerializers }}
+ {{- if stringContains .MediaType "json" }}
+ r.Consumers["{{ .MediaType }}"] = runtime.JSONConsumer()
+ {{- else }}
+ // warning: consumes {{ .MediaType }} is not supported by go-swagger cli yet
+ {{- end }}
+ {{- end }}
+ {{ end }}
+ {{ range .Produces }}
+ {{- range .AllSerializers }}
+ {{- if stringContains .MediaType "json" }}
+ r.Producers["{{ .MediaType }}"] = runtime.JSONProducer()
+ {{- else }}
+ // warning: produces {{ .MediaType }} is not supported by go-swagger cli yet
+ {{- end }}
+ {{- end }}
+ {{ end }}
+
+ {{- if .SecurityDefinitions }}
+ auth, err := makeAuthInfoWriter(cmd)
+ if err != nil {
+ return nil, err
+ }
+ r.DefaultAuthentication = auth
+ {{ end }}
+ appCli := client.New(r, strfmt.Default)
+ logDebugf("Server url: %v://%v", scheme, hostname)
+ return appCli, nil
+}
+
+// MakeRootCmd returns the root cmd
+func MakeRootCmd() (*cobra.Command, error) {
+ cobra.OnInitialize(initViperConfigs)
+
+ // Use executable name as the command name
+ rootCmd := &cobra.Command{
+ Use: exeName,
+ }
	{{/*note: viper-bound flag values must be retrieved from viper rather than from cmd*/}}
+ // register basic flags
+ rootCmd.PersistentFlags().String("hostname", client.DefaultHost, "hostname of the service")
+ viper.BindPFlag("hostname", rootCmd.PersistentFlags().Lookup("hostname"))
+ rootCmd.PersistentFlags().String("scheme", client.DefaultSchemes[0], fmt.Sprintf("Choose from: %v", client.DefaultSchemes))
+ viper.BindPFlag("scheme", rootCmd.PersistentFlags().Lookup("scheme"))
+ rootCmd.PersistentFlags().String("base-path", client.DefaultBasePath, fmt.Sprintf("For example: %v", client.DefaultBasePath))
+ viper.BindPFlag("base_path", rootCmd.PersistentFlags().Lookup("base-path"))
+
+ // configure debug flag
+ rootCmd.PersistentFlags().BoolVar(&debug, "debug", false, "output debug logs")
+ // configure config location
+ rootCmd.PersistentFlags().StringVar(&configFile, "config", "", "config file path")
+ // configure dry run flag
+ rootCmd.PersistentFlags().BoolVar(&dryRun, "dry-run", false, "do not send the request to server")
+
+ // register security flags
+ {{- if .SecurityDefinitions }}
+ if err := registerAuthInoWriterFlags(rootCmd); err != nil{
+ return nil, err
+ }
+ {{- end }}
+ // add all operation groups
+{{- range .OperationGroups -}}
+ {{- $operationGroupCmdVarName := printf "operationGroup%vCmd" (pascalize .Name) }}
+ {{ $operationGroupCmdVarName }}, err := makeOperationGroup{{ pascalize .Name }}Cmd()
+ if err != nil {
+ return nil, err
+ }
+ rootCmd.AddCommand({{ $operationGroupCmdVarName }})
+{{ end }}
+
+ // add cobra completion
+ rootCmd.AddCommand(makeGenCompletionCmd())
+
+ return rootCmd, nil
+}
+
+// initViperConfigs initialize viper config using config file in '$HOME/.config/<cli name>/config.<json|yaml...>'
+// currently hostname, scheme and auth tokens can be specified in this config file.
+func initViperConfigs() {
+ if configFile != "" {
+ // use user specified config file location
+ viper.SetConfigFile(configFile)
+ }else{
+ // look for default config
+ // Find home directory.
+ home, err := homedir.Dir()
+ cobra.CheckErr(err)
+
+ // Search config in home directory with name ".cobra" (without extension).
+ viper.AddConfigPath(path.Join(home, ".config", exeName))
+ viper.SetConfigName("config")
+ }
+
+ if err := viper.ReadInConfig(); err != nil {
+ logDebugf("Error: loading config file: %v", err)
+ return
+ }
+ logDebugf("Using config file: %v", viper.ConfigFileUsed())
+}
+
+{{- if .SecurityDefinitions }}
+{{- /*youyuan: rework this since spec may define multiple auth schemes.
+ cli needs to detect which one user passed rather than add all of them.*/}}
+// registerAuthInoWriterFlags registers all flags needed to perform authentication
+func registerAuthInoWriterFlags(cmd *cobra.Command) error {
+{{- range .SecurityDefinitions }}
+ /*{{.Name}} {{.Description}}*/
+ {{- if .IsBasicAuth }}
+ cmd.PersistentFlags().String("username", "", "username for basic auth")
+ viper.BindPFlag("username", cmd.PersistentFlags().Lookup("username"))
+ cmd.PersistentFlags().String("password", "", "password for basic auth")
+ viper.BindPFlag("password", cmd.PersistentFlags().Lookup("password"))
+ {{- end }}
+ {{- if .IsAPIKeyAuth }}
+ cmd.PersistentFlags().String("{{.Name}}", "", `{{.Description}}`)
+ viper.BindPFlag("{{.Name}}", cmd.PersistentFlags().Lookup("{{.Name}}"))
+ {{- end }}
+ {{- if .IsOAuth2 }}
+ // oauth2: let user provide the token in a flag, rather than implement the logic to fetch the token.
+ cmd.PersistentFlags().String("oauth2-token", "", `{{.Description}}`)
+ viper.BindPFlag("oauth2-token", cmd.PersistentFlags().Lookup("oauth2-token"))
+ {{- end }}
+{{- end }}
+ return nil
+}
+
+// makeAuthInfoWriter retrieves cmd flags and construct an auth info writer
+func makeAuthInfoWriter(cmd *cobra.Command) (runtime.ClientAuthInfoWriter, error) {
+ auths := []runtime.ClientAuthInfoWriter{}
+{{- range .SecurityDefinitions }}
+ /*{{.Name}} {{.Description}}*/
+ {{- if .IsBasicAuth }}
+ if viper.IsSet("username") {
+ usr := viper.GetString("username")
+ if !viper.IsSet("password"){
+ return nil, fmt.Errorf("Basic Auth password for user [%v] is not provided.", usr)
+ }
+ pwd := viper.GetString("password")
+ auths = append(auths, httptransport.BasicAuth(usr,pwd))
+ }
+ {{- end }}
+ {{- if .IsAPIKeyAuth }}
+ if viper.IsSet("{{.Name}}") {
+ {{ pascalize .Name }}Key := viper.GetString("{{.Name}}")
+ auths = append(auths, httptransport.APIKeyAuth("{{.Name}}", "{{.In}}", {{ pascalize .Name }}Key))
+ }
+ {{- end }}
+ {{- if .IsOAuth2 }}
+ if viper.IsSet("oauth2-token") {
+ // oauth2 workflow for generated CLI is not ideal.
+ // If you have suggestions on how to support it, raise an issue here: https://github.com/go-swagger/go-swagger/issues
+ // This will be added to header: "Authorization: Bearer {oauth2-token value}"
+ token := viper.GetString("oauth2-token")
+ auths = append(auths, httptransport.BearerToken(token))
+ }
+ {{- end }}
+{{- end }}
+ if len(auths) == 0 {
+ logDebugf("Warning: No auth params detected.")
+ return nil, nil
+ }
+ // compose all auths together
+ return httptransport.Compose(auths...), nil
+}
+{{- end }}
+
+{{ range .OperationGroups -}}
+func makeOperationGroup{{ pascalize .Name }}Cmd() (*cobra.Command, error) {
+ {{- $operationGroupCmdVarName := printf "operationGroup%vCmd" (pascalize .Name) }}
+ {{ $operationGroupCmdVarName }} := &cobra.Command{
+ Use: "{{ .Name }}",
+ Long: `{{ .Description }}`,
+ }
+{{ range .Operations }}
+ {{- $operationCmdVarName := printf "operation%vCmd" (pascalize .Name) }}
+ {{ $operationCmdVarName }}, err := makeOperation{{pascalize .Package}}{{ pascalize .Name }}Cmd()
+ if err != nil {
+ return nil, err
+ }
+ {{ $operationGroupCmdVarName }}.AddCommand({{ $operationCmdVarName }})
+{{ end }}
+ return {{ $operationGroupCmdVarName }}, nil
+}
+{{ end }} {{/*operation group*/}}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/completion.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/completion.gotmpl
new file mode 100644
index 000000000..c115cc1a9
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/completion.gotmpl
@@ -0,0 +1,77 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .GenOpts.CliPackage }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import(
+ "github.com/spf13/cobra"
+)
+
+func makeGenCompletionCmd() *cobra.Command{
+
+ var completionCmd = &cobra.Command{
+ Use: "completion [bash|zsh|fish|powershell]",
+ Short: "Generate completion script",
+ Long: `To load completions:
+
+Bash:
+
+ $ source <(yourprogram completion bash)
+
+ # To load completions for each session, execute once:
+ # Linux:
+ $ yourprogram completion bash > /etc/bash_completion.d/yourprogram
+ # macOS:
+ $ yourprogram completion bash > /usr/local/etc/bash_completion.d/yourprogram
+
+Zsh:
+
+ # If shell completion is not already enabled in your environment,
+ # you will need to enable it. You can execute the following once:
+
+ $ echo "autoload -U compinit; compinit" >> ~/.zshrc
+
+ # To load completions for each session, execute once:
+ $ yourprogram completion zsh > "${fpath[1]}/_yourprogram"
+
+ # You will need to start a new shell for this setup to take effect.
+
+fish:
+
+ $ yourprogram completion fish | source
+
+ # To load completions for each session, execute once:
+ $ yourprogram completion fish > ~/.config/fish/completions/yourprogram.fish
+
+PowerShell:
+
+ PS> yourprogram completion powershell | Out-String | Invoke-Expression
+
+ # To load completions for every new session, run:
+ PS> yourprogram completion powershell > yourprogram.ps1
+ # and source this file from your PowerShell profile.
+`,
+ DisableFlagsInUseLine: true,
+ ValidArgs: []string{"bash", "zsh", "fish", "powershell"},
+ Args: cobra.ExactValidArgs(1),
+ Run: func(cmd *cobra.Command, args []string) {
+ switch args[0] {
+ case "bash":
+ cmd.Root().GenBashCompletion(os.Stdout)
+ case "zsh":
+ cmd.Root().GenZshCompletion(os.Stdout)
+ case "fish":
+ cmd.Root().GenFishCompletion(os.Stdout, true)
+ case "powershell":
+ cmd.Root().GenPowerShellCompletionWithDesc(os.Stdout)
+ }
+ },
+ }
+ return completionCmd
+} \ No newline at end of file
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/main.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/main.gotmpl
new file mode 100644
index 000000000..6cc470a2f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/main.gotmpl
@@ -0,0 +1,28 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package main
+
+import (
+ "encoding/json"
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+func main() {
+ rootCmd,err := cli.MakeRootCmd()
+ if err != nil {
+ fmt.Println("Cmd construction error: ", err)
+ os.Exit(1)
+ }
+
+ if err := rootCmd.Execute(); err != nil {
+ os.Exit(1)
+ }
+} \ No newline at end of file
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/modelcli.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/modelcli.gotmpl
new file mode 100644
index 000000000..d93e91d41
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/modelcli.gotmpl
@@ -0,0 +1,25 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package cli
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+
+import (
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+ "github.com/spf13/cobra"
+)
+
+// Schema cli for {{.GoType}}
+{{ template "modelschemacli" .}}
+
+{{ range .ExtraSchemas }}
+// Extra schema cli for {{.GoType}}
+ {{ template "modelschemacli" .}}
+{{ end }} \ No newline at end of file
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/operation.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/operation.gotmpl
new file mode 100644
index 000000000..10666ed78
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/operation.gotmpl
@@ -0,0 +1,230 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+{{- /*TODO: do not hardcode cli pkg*/}}
+package cli
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+
+ "github.com/spf13/cobra"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/swag"
+ httptransport "github.com/go-openapi/runtime/client"
+)
+
+// makeOperation{{pascalize .Package}}{{ pascalize .Name }}Cmd returns a cmd to handle operation {{ camelize .Name }}
+func makeOperation{{pascalize .Package}}{{ pascalize .Name }}Cmd() (*cobra.Command, error) {
+ cmd := &cobra.Command{
+ Use: "{{ .Name }}",
+ Short: `{{ escapeBackticks .Description}}`,
+ RunE: runOperation{{pascalize .Package}}{{ pascalize .Name }},
+ }
+
+ if err := registerOperation{{pascalize .Package}}{{ pascalize .Name }}ParamFlags(cmd); err != nil{
+ return nil, err
+ }
+
+ return cmd, nil
+}
+
+{{ $operationGroup := .Package }}
+{{ $operation := .Name }}
+{{ $operationPkgAlias := .PackageAlias }}
+// runOperation{{pascalize $operationGroup }}{{ pascalize $operation }} uses cmd flags to call endpoint api
+func runOperation{{pascalize $operationGroup }}{{ pascalize $operation }}(cmd *cobra.Command, args []string) error{
+ appCli, err := makeClient(cmd, args)
+ if err != nil {
+ return err
+ }
+ // retrieve flag values from cmd and fill params
+ params := {{ .PackageAlias }}.New{{ pascalize .Name}}Params()
+{{- range .Params }}
+ if err, _ := retrieveOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{ pascalize .Name }}Flag(params, "", cmd); err != nil{
+ return err
+ }
+{{- end }} {{/*Params*/}}
+ if dryRun {
+ {{/* Note: dry run is not very useful for now, but useful when validation is added in future*/}}
+ logDebugf("dry-run flag specified. Skip sending request.")
+ return nil
+ }
+ // make request and then print result
+{{- /*Package string is the operation name*/}}
+ msgStr, err := parseOperation{{pascalize .Package}}{{ pascalize .Name }}Result(appCli.{{- pascalize .Package }}.{{ pascalize .Name }}(params {{- if .Authorized}}, nil{{ end }}{{ if .HasStreamingResponse }}, &bytes.Buffer{}{{ end }}))
+ if err != nil {
+ return err
+ }
+ if !debug{
+ {{/* In debug mode content should have been printed in transport layer, so do not print again*/}}
+ fmt.Println(msgStr)
+ }
+ return nil
+}
+
+// registerOperation{{pascalize $operationGroup }}{{ pascalize $operation }}ParamFlags registers all flags needed to fill params
+func registerOperation{{pascalize $operationGroup }}{{ pascalize $operation }}ParamFlags(cmd *cobra.Command) error {
+{{- range .Params }}
+ if err := registerOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{pascalize .Name }}ParamFlags("", cmd); err != nil{
+ return err
+ }
+{{- end }}
+ return nil
+}
+
+{{/*register functions for each fields in this operation*/}}
+{{- range .Params }}
+func registerOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{pascalize .Name }}ParamFlags(cmdPrefix string, cmd *cobra.Command) error{
+ {{- if .IsPrimitive }}
+ {{ template "primitiveregistrator" . }}
+ {{- else if .IsArray }}
+ {{ template "arrayregistrator" . }}
+ {{- else if and .IsBodyParam .Schema (not .IsArray) (not .IsMap) (not .IsStream) }}
+ {{ template "modelparamstringregistrator" . }}
+ {{ template "modelparamregistrator" . }}
+ {{/* Do not mark body flag as required, since the individual flag for body field will be added separately */}}
+ {{- else }}
+ // warning: go type {{ .GoType }} is not supported by go-swagger cli yet.
+ {{- end }}
+ return nil
+}
+{{- end }}
+
+{{/*functions to retrieve each field of params*/}}
+{{- range .Params }}
+func retrieveOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{ pascalize .Name }}Flag(m *{{ $operationPkgAlias }}.{{ pascalize $operation }}Params, cmdPrefix string, cmd *cobra.Command) (error,bool){
+ retAdded := false
+ {{- $flagStr := .Name }}
+ {{- $flagValueVar := printf "%vValue" (camelize .Name) }}
+ {{- /*only set the param if user set the flag*/}}
+ if cmd.Flags().Changed("{{ $flagStr }}") {
+ {{- if .IsPrimitive }}
+ {{ template "primitiveretriever" . }}
+ {{- else if .IsArray }}
+ {{ template "arrayretriever" . }}
+ {{- else if .IsMap }}
+ // warning: {{ .Name }} map type {{.GoType}} is not supported by go-swagger cli yet
+ {{- else if and .IsBodyParam .Schema .IsComplexObject (not .IsStream) }}
+ {{- /*schema payload can be passed in cmd as a string and here is unmarshalled to model struct and attached in params*/}}
+ // Read {{ $flagStr }} string from cmd and unmarshal
+ {{ $flagValueVar }}Str, err := cmd.Flags().GetString("{{ $flagStr }}")
+ if err != nil {
+ return err, false
+ }
+ {{/*Note anonymous body schema is not pointer*/}}
+ {{ $flagValueVar }} := {{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}{}
+ if err := json.Unmarshal([]byte({{ $flagValueVar }}Str), &{{ $flagValueVar }}); err!= nil{
+ return fmt.Errorf("cannot unmarshal {{ $flagStr }} string in {{.GoType}}: %v", err), false
+ }
+ m.{{ .ID }} = {{- if .IsNullable }}&{{- end }}{{ $flagValueVar }}
+ {{- else }}
+ // warning: {{.GoType}} is not supported by go-swagger cli yet
+ {{- end }} {{/*end go type case*/}}
+ }
+ {{- if and .IsBodyParam .Schema .IsComplexObject (not .IsArray) (not .IsMap) (not .IsStream) }}
+ {{- /* Add flags to capture fields in Body. If previously Body struct was constructed in unmarshalling body string,
+ then reuse the struct, otherwise construct an empty value struct to fill. Here body field flags overwrite
+ unmarshalled body string values. */}}
+ {{- $flagModelVar := printf "%vModel" (camelize $flagValueVar) }}
+ {{ $flagModelVar }} := m.{{ .ID }}
+ if swag.IsZero({{ $flagModelVar }}){
+ {{ $flagModelVar }} = {{- if .IsNullable }}&{{- end }}{{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}{}
+ }
+ {{- /*Only attach the body struct in params if user passed some flag filling some body fields.*/}}
+ {{- /* add "&" to $flagModelVar when it is not nullable because the retrieve method always expects a pointer */}}
+ err, added := retrieveModel{{ pascalize (dropPackage .GoType) }}Flags(0, {{if not .IsNullable}}&{{end}}{{ $flagModelVar }}, "{{ camelize (dropPackage .GoType) }}", cmd)
+ if err != nil{
+ return err, false
+ }
+ if added {
+ m.{{.ID}} = {{ $flagModelVar }}
+ }
+ if dryRun && debug {
+ {{/* in dry-run mode we don't get transport debug strings, so print them here */}}
+ {{- $bodyDebugVar := printf "%vDebugBytes" (camelize $flagValueVar) }}
+ {{ $bodyDebugVar }}, err := json.Marshal(m.{{.ID}})
+ if err != nil{
+ return err, false
+ }
+ logDebugf("{{.ID }} dry-run payload: %v", string({{ $bodyDebugVar }}))
+ }
+ retAdded = retAdded || added
+ {{/*body debug string will be printed in transport layer*/}}
+ {{- end }}
+ return nil, retAdded
+}
+{{- end }} {{/*Params*/}}
+
+// parseOperation{{pascalize .Package}}{{ pascalize .Name }}Result parses request result and return the string content
+{{- /*TODO: handle multiple success response case*/}}
+func parseOperation{{pascalize .Package}}{{ pascalize .Name }}Result({{- if .SuccessResponse }}{{ range $i, $v := .SuccessResponses }} resp{{$i}} *{{$v.Package}}.{{pascalize $v.Name}},{{- end }}{{- end }} respErr error) (string, error){
+ if respErr != nil {
+ {{- /*error is of type default model. If we can cast, then print the resp.*/}}
+ {{ if .DefaultResponse }} {{with .DefaultResponse}}
+ {{ if .Schema }}
+ var iRespD interface{} = respErr
+ respD, ok := iRespD.(*{{ .Package }}.{{ pascalize .Name }})
+ if ok {
+ if !swag.IsZero(respD) && !swag.IsZero(respD.Payload) {
+ msgStr,err := json.Marshal(respD.Payload)
+ if err != nil{
+ return "", err
+ }
+ return string(msgStr), nil
+ }
+ }
+ {{ else }}
+ // Non schema case: warning {{.Name}} is not supported
+ {{ end }}
+ {{ end }} {{ end }}
+ {{- range $i, $v := .Responses }}
+ {{ if .Schema }}
+ var iResp{{$i}} interface{} = respErr
+ resp{{$i}}, ok := iResp{{$i}}.(*{{ .Package }}.{{ pascalize .Name }})
+ if ok {
+ if !swag.IsZero(resp{{$i}}) && !swag.IsZero(resp{{$i}}.Payload) {
+ msgStr,err := json.Marshal(resp{{$i}}.Payload)
+ if err != nil{
+ return "", err
+ }
+ return string(msgStr), nil
+ }
+ }
+ {{ else }}
+ // Non schema case: warning {{.Name}} is not supported
+ {{ end }}
+ {{ end }}
+ return "", respErr
+ }
+ {{- range $i, $v := .SuccessResponses }}
+ {{ if .Schema }}
+ {{- with .Schema}}
+ if !swag.IsZero(resp{{$i}}) && !swag.IsZero(resp{{$i}}.Payload) {
+ {{- if or .IsComplexObject .IsArray .IsMap }}
+ msgStr,err := json.Marshal(resp{{$i}}.Payload)
+ if err != nil{
+ return "", err
+ }
+ {{- else }}
+ msgStr := fmt.Sprintf("%v", resp{{$i}}.Payload)
+ {{- end }}
+ return string(msgStr), nil
+ }
+ {{- end }}
+ {{ else }}
+ // warning: non schema response {{.Name}} is not supported by go-swagger cli yet.
+ {{ end }}
+ {{ end }}
+ return "", nil
+}
+
+{{/*for models defined in params, generate their register and retrieve flags functions*/}}
+{{- range .ExtraSchemas }}
+ {{ template "modelschemacli" . }}
+{{- end}}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/registerflag.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/registerflag.gotmpl
new file mode 100644
index 000000000..637811155
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/registerflag.gotmpl
@@ -0,0 +1,97 @@
+{{/*util functions to run or register cmd flags*/}}
+
+{{ define "flagdescriptionvar" }}
+ {{- $fullDescription := (escapeBackticks .Description) }}
+ {{- if .Required}}
+ {{- $fullDescription = printf "Required. %v" $fullDescription}}
+ {{- end}}
+ {{- if .Enum }}
+ {{- $fullDescription = printf "Enum: %v. %v" (json .Enum) $fullDescription}}
+ {{- end }}
+ {{ camelize .Name }}Description := `{{ $fullDescription }}`
+{{ end }}
+
+{{ define "flagnamevar" }}
+ {{- $flagNameVar := printf "%vFlagName" (camelize .Name) }}
+ var {{ $flagNameVar }} string
+ if cmdPrefix == "" {
+ {{ $flagNameVar }} = "{{ .Name }}"
+ }else{
+ {{ $flagNameVar }} = fmt.Sprintf("%v.{{ .Name }}", cmdPrefix)
+ }
+{{ end }}
+
+{{ define "flagdefaultvar" }}
+ {{ $defaultVar := printf "%vFlagDefault" (camelize .Name) }}
+ var {{ $defaultVar}} {{ .GoType }} {{ if .Default }}= {{ printf "%#v" .Default }}{{ end }}
+{{ end }}
+
+{{/* Not used. CLI does not mark flag as required, and required will be checked by validation in future */}}
+{{/* {{ define "requiredregistrator" }}
+ if err := cmd.MarkPersistentFlagRequired({{ camelize .Name }}FlagName); err != nil{
+ return err
+ }
+{{ end }} */}}
+
+{{ define "enumcompletion" }} {{/*only used for primitive types. completion type is always string.*/}}
+{{ if .Enum }}
+if err := cmd.RegisterFlagCompletionFunc({{ camelize .Name }}FlagName,
+ func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+ var res []string
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ return res, cobra.ShellCompDirectiveDefault
+ }); err != nil{
+ return err
+}
+{{ end }}
+{{ end }}
+
+{{/* intended to be used on struct GenSchema with .IsPrimitive */}}
+{{ define "primitiveregistrator" }}
+ {{- if or (eq .GoType "int64") (eq .GoType "int32") (eq .GoType "string") (eq .GoType "float64") (eq .GoType "float32") (eq .GoType "bool") }}
+ {{ template "flagdescriptionvar" . }}
+ {{ template "flagnamevar" . }}
+ {{ template "flagdefaultvar" . }}
+ _ = cmd.PersistentFlags().{{ pascalize .GoType }}({{ camelize .Name }}FlagName, {{ camelize .Name }}FlagDefault, {{ (camelize .Name) }}Description)
+ {{ template "enumcompletion" . }}
+ {{- else if or (eq .GoType "strfmt.DateTime") (eq .GoType "strfmt.UUID") (eq .GoType "strfmt.ObjectId") }} {{/* read as string */}}
+ {{ template "flagdescriptionvar" . }}
+ {{ template "flagnamevar" . }}
+ _ = cmd.PersistentFlags().String({{ camelize .Name }}FlagName, "", {{ (camelize .Name) }}Description)
+ {{ template "enumcompletion" . }}
+ {{- else }}
+ // warning: primitive {{.Name}} {{.GoType }} is not supported by go-swagger cli yet
+ {{- end }}
+{{ end }}
+
+{{ define "arrayregistrator" }}
+ {{- if or (eq .GoType "[]int64") (eq .GoType "[]int32") (eq .GoType "[]string") (eq .GoType "[]float64") (eq .GoType "[]float32") (eq .GoType "[]bool") }}
+ {{ template "flagdescriptionvar" . }}
+ {{ template "flagnamevar" . }}
+ {{ template "flagdefaultvar" . }}
+ _ = cmd.PersistentFlags().{{ pascalize .GoType }}Slice({{ camelize .Name }}FlagName, {{ camelize .Name }}FlagDefault, {{ (camelize .Name) }}Description)
+ {{ template "enumcompletion" . }}
+ {{- else if or (eq .GoType "[]strfmt.DateTime") (eq .GoType "[]strfmt.UUID") (eq .GoType "[]strfmt.ObjectId") }} {{/* read as string */}}
+ {{ template "flagdescriptionvar" . }}
+ {{ template "flagnamevar" . }}
+ _ = cmd.PersistentFlags().StringSlice({{ camelize .Name }}FlagName, []string{}, {{ (camelize .Name) }}Description)
+ {{- else }}
+ // warning: array {{.Name}} {{.GoType }} is not supported by go-swagger cli yet
+ {{- end }}
+{{ end }}
+
+
+{{/* each body parameter gets a string flag to input json raw string */}}
+{{ define "modelparamstringregistrator" }}
+ {{ template "flagnamevar" . }}
+ _ = cmd.PersistentFlags().String({{ camelize .Name }}FlagName, "", "Optional json string for [{{ .Name }}]. {{ .Description }}")
+{{ end }}
+
+{{ define "modelparamregistrator" }} {{/* register a param that has a schema */}}
+ // add flags for body {{/*use go type as the flag prefix. There is no good way to determine the original str case in spec*/}}
+ if err := registerModel{{ pascalize (dropPackage .GoType) }}Flags(0, "{{ camelize (dropPackage .GoType) }}", cmd); err != nil {
+ return err
+ }
+{{ end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/retrieveflag.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/retrieveflag.gotmpl
new file mode 100644
index 000000000..a1ff1e5de
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/retrieveflag.gotmpl
@@ -0,0 +1,59 @@
+{{/*util functions to retrieve flags*/}}
+
+{{ define "primitiveretriever" }}
+ {{- $flagValueVar := printf "%vFlagValue" (camelize .Name) }}
+ {{- $flagNameVar := printf "%vFlagName" (camelize .Name )}}
+ {{- if or (eq .GoType "int64") (eq .GoType "int32") (eq .GoType "string") (eq .GoType "float64") (eq .GoType "float32") (eq .GoType "bool") }}
+ {{ template "flagnamevar" . }}
+ {{ $flagValueVar }}, err := cmd.Flags().Get{{pascalize .GoType}}({{ $flagNameVar }})
+ if err != nil{
+ return err, false
+ }
+ {{- /* receiver by convention is m for CLI */}}
+ m.{{ pascalize .Name }} = {{- if .IsNullable }}&{{- end }}{{ $flagValueVar }}
+ {{- else if or (eq .GoType "strfmt.DateTime") (eq .GoType "strfmt.ObjectId") (eq .GoType "strfmt.UUID" ) }} {{/*Get flag value as string, then parse it*/}}
+ {{/*Many of the strfmt types can be added here*/}}
+ {{ template "flagnamevar" . }}
+ {{ $flagValueVar }}Str, err := cmd.Flags().GetString({{ $flagNameVar }})
+ if err != nil{
+ return err, false
+ }
+ var {{ $flagValueVar }} {{ .GoType }}
+ if err := {{ $flagValueVar }}.UnmarshalText([]byte({{ $flagValueVar }}Str)); err != nil{
+ return err, false
+ }
+ m.{{ pascalize .Name }} = {{- if .IsNullable }}&{{- end }}{{ $flagValueVar }}
+ {{- else }}
+ // warning: primitive {{.Name}} {{.GoType }} is not supported by go-swagger cli yet
+ {{- end }}
+{{ end }}
+
+{{ define "arrayretriever" }}
+ {{- $flagValueVar := printf "%vFlagValues" (camelize .Name) }}
+ {{- $flagNameVar := printf "%vFlagName" (camelize .Name )}}
+ {{- if or (eq .GoType "[]int64") (eq .GoType "[]int32") (eq .GoType "[]string") (eq .GoType "[]float64") (eq .GoType "[]float32") (eq .GoType "[]bool") }}
+ {{ template "flagnamevar" . }}
+ {{ $flagValueVar }}, err := cmd.Flags().Get{{pascalize .GoType}}Slice({{ $flagNameVar }})
+ if err != nil{
+ return err, false
+ }
+ {{- /* receiver by convention is m for CLI */}}
+ m.{{ pascalize .Name }} = {{ $flagValueVar }}
+ {{- else if or (eq .GoType "[]strfmt.DateTime") (eq .GoType "[]strfmt.ObjectId") (eq .GoType "[]strfmt.UUID") }} {{/*Get flag value as string, then parse it*/}}
+ {{ template "flagnamevar" . }}
+ {{ $flagValueVar }}Str, err := cmd.Flags().GetStringSlice({{ $flagNameVar }})
+ if err != nil{
+ return err, false
+ }
+
+ {{ $flagValueVar }} := make({{ .GoType }}, len({{ $flagValueVar }}Str))
+ for i, v := range {{ $flagValueVar }}Str {
+ if err := {{ $flagValueVar }}[i].UnmarshalText([]byte(v)); err != nil{
+ return err, false
+ }
+ }
+ m.{{ pascalize .Name }} = {{- if .IsNullable }}&{{- end }}{{ $flagValueVar }}
+ {{- else }}
+ // warning: array {{.Name}} {{.GoType }} is not supported by go-swagger cli yet
+ {{- end }}
+{{ end }} \ No newline at end of file
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/schema.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/schema.gotmpl
new file mode 100644
index 000000000..2dc42aebc
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/schema.gotmpl
@@ -0,0 +1,193 @@
+{{/*util functions to generate register and retrieve functions for a model*/}}
+
+{{ define "modelschemacli" }}
+ {{/*some guards to prevent rendering unsupported model types. TODO: remove this guard*/}}
+ {{if or .IsPrimitive .IsComplexObject }}
+ {{ template "modelschemacliinternal" . }}
+ {{ else }}
+ // Name: [{{.Name}}], Type:[{{ .GoType }}], register and retrieve functions are not rendered by go-swagger cli
+ {{ end }}
+{{ end }}
+
+{{/*since register and retrieve are the same for properties and all of, share them here*/}}
+{{ define "propertyregistor" }}
+ {{- if .IsPrimitive }}
+ {{ template "primitiveregistrator" . }}
+ {{- else if .IsArray }}
+ // warning: {{.Name}} {{ .GoType }} array type is not supported by go-swagger cli yet
+ {{- else if .IsMap }}
+ // warning: {{.Name}} {{ .GoType }} map type is not supported by go-swagger cli yet
+ {{- else if .IsComplexObject }} {{/* struct case */}}
+ {{ template "flagnamevar" . }}
+ if err := registerModel{{pascalize (dropPackage .GoType) }}Flags(depth + 1, {{ camelize .Name }}FlagName, cmd); err != nil{
+ return err
+ }
+ {{- else }}
+ // warning: {{.Name}} {{ .GoType }} unknown type is not supported by go-swagger cli yet
+ {{- end }}
+{{ end }}
+
+{{ define "propertyretriever" }}
+ {{- $flagNameVar := printf "%vFlagName" (camelize .Name) }}
+ {{- $flagValueVar := printf "%vFlagValue" (camelize .Name) }}
+ {{ $flagNameVar }} := fmt.Sprintf("%v.{{ .Name }}", cmdPrefix)
+ if cmd.Flags().Changed({{ $flagNameVar }}) {
+ {{- if .IsPrimitive }}
+ {{ template "primitiveretriever" . }}
+ retAdded = true
+ {{- else if .IsArray }}
+ // warning: {{ .Name }} array type {{ .GoType }} is not supported by go-swagger cli yet
+ {{- else if .IsMap }}
+ // warning: {{ .Name }} map type {{ .GoType }} is not supported by go-swagger cli yet
+ {{- else if .IsComplexObject }}
+ // info: complex object {{.Name}} {{.GoType}} is retrieved outside this Changed() block
+ {{- else }}
+ // warning: {{.Name}} {{ .GoType }} unknown type is not supported by go-swagger cli yet
+ {{- end }}
+ }
+ {{- if and .IsComplexObject (not .IsArray) (not .IsMap) (not .IsStream) }}
+ {{ $flagValueVar }} := m.{{pascalize .Name}}
+ if swag.IsZero({{ $flagValueVar }}){
+ {{ $flagValueVar }} = {{if .IsNullable }}&{{end}}{{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}{}
+ }
+ {{/* always lift the payload to pointer and pass to model retrieve function. If .GoType has pkg str, use it, else use .Pkg+.GoType */}}
+ err, {{camelize .Name }}Added := retrieveModel{{pascalize (dropPackage .GoType) }}Flags(depth + 1, {{if not .IsNullable }}&{{end}}{{ $flagValueVar }}, {{ $flagNameVar }}, cmd)
+ if err != nil{
+ return err, false
+ }
+ retAdded = retAdded || {{camelize .Name }}Added
+ if {{camelize .Name }}Added {
+ m.{{pascalize .Name}} = {{ $flagValueVar }}
+ }
+ {{- end }}
+{{ end }}
+
+{{ define "modelschemacliinternal" }} {{/*used by model definition and in params model*/}}
+{{- $modelName := .Name }}
+{{/*model package is filled by generator*/}}
+{{ $modelPkg := toPackageName .Pkg}}
+{{ $modelType := .GoType }}
+
+// register flags to command
+func registerModel{{pascalize .Name}}Flags(depth int, cmdPrefix string, cmd *cobra.Command) error {
+ {{ range .AllOf }}
+ {{- if not .IsAnonymous }}{{/* named type composition */}}
+ {{ if or .IsPrimitive .IsComplexObject }}
+ // register embedded {{ .GoType }} flags
+ {{/*defer all of the registration to the model's register method. embed should not lift cmdPrefix */}}
+ if err := registerModel{{ pascalize (dropPackage .GoType) }}Flags(depth, cmdPrefix, cmd); err != nil{
+ return err
+ }
+ {{ else }}
+ // {{ .Name }} {{ .GoType }} register is skipped
+ {{ end }}
+ {{ else }}{{/*inline definition. assume only properties are used*/}}
+ // register anonymous fields for {{.Name}}
+ {{ $anonName := .Name }}
+ {{ range .Properties }}
+ if err := register{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize .Name }}(depth, cmdPrefix, cmd); err != nil{
+ return err
+ }
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ range .Properties }}
+ if err := register{{ pascalize $modelName }}{{ pascalize .Name }}(depth, cmdPrefix, cmd); err != nil{
+ return err
+ }
+ {{ end }}
+ return nil
+}
+
+{{ range .AllOf }}
+ {{- if .IsAnonymous }}{{/* inline definition. schema case is deferred. */}}
+// inline definition name {{ .Name }}, type {{.GoType}}
+ {{ $anonName := .Name }}
+ {{ range .Properties }}
+func register{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize .Name }}(depth int, cmdPrefix string, cmd *cobra.Command) error {
+ if depth > maxDepth {
+ return nil
+ }
+ {{ template "propertyregistor" . }}
+ return nil
+}
+ {{ end }}
+ {{ end }}
+{{ end }}
+
+{{/*register functions for each fields in this model */}}
+{{ range .Properties }}
+func register{{ pascalize $modelName }}{{ pascalize .Name }}(depth int, cmdPrefix string, cmd *cobra.Command) error{
+ if depth > maxDepth {
+ return nil
+ }
+ {{ template "propertyregistor" .}}
+ return nil
+}
+{{ end }} {{/*Properties*/}}
+
+// retrieve flags from commands, and set value in model. Return true if any flag is passed by user to fill model field.
+func retrieveModel{{pascalize $modelName }}Flags(depth int, m *{{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}, cmdPrefix string, cmd *cobra.Command) (error, bool) {
+ retAdded := false
+ {{ range .AllOf }}
+ {{- if not .IsAnonymous }}{{/* named type composition */}}
+ {{ if or .IsPrimitive .IsComplexObject }}
+ // retrieve model {{.GoType}}
+ err, {{camelize .Name }}Added := retrieveModel{{ pascalize (dropPackage .GoType) }}Flags(depth, &m.{{pascalize (dropPackage .GoType) }}, cmdPrefix, cmd)
+ if err != nil{
+ return err, false
+ }
+ retAdded = retAdded || {{camelize .Name }}Added
+ {{ else }} {{/*inline anonymous case*/}}
+
+ {{ end }}
+ {{- else }}
+ // retrieve allOf {{.Name}} fields
+ {{ $anonName := .Name }}
+ {{ range .Properties }}
+ err, {{camelize .Name}}Added := retrieve{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize .Name }}Flags(depth, m, cmdPrefix, cmd)
+ if err != nil{
+ return err, false
+ }
+ retAdded = retAdded || {{ camelize .Name }}Added
+ {{ end }}
+ {{- end }}
+ {{ end }}
+ {{ range .Properties }}
+ err, {{ camelize .Name }}Added := retrieve{{pascalize $modelName }}{{pascalize .Name }}Flags(depth, m, cmdPrefix, cmd)
+ if err != nil{
+ return err, false
+ }
+ retAdded = retAdded || {{ camelize .Name }}Added
+ {{ end }}
+ return nil, retAdded
+}
+
+{{ range .AllOf }}
+ {{- if .IsAnonymous }}{{/* inline definition. schema case is deferred. */}}
+// define retrieve functions for fields for inline definition name {{ .Name }}
+ {{ $anonName := .Name }}
+ {{ range .Properties }} {{/*anonymous fields will be registered directly on parent model*/}}
+func retrieve{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize .Name }}Flags(depth int, m *{{if containsPkgStr $modelType}}{{ $modelType }}{{else}}{{ $modelPkg }}.{{$modelType}}{{ end }},cmdPrefix string, cmd *cobra.Command) (error,bool) {
+ if depth > maxDepth {
+ return nil, false
+ }
+ retAdded := false
+ {{ template "propertyretriever" . }}
+ return nil, retAdded
+}
+ {{ end }}
+ {{ end }}
+{{ end }}
+
+{{ range .Properties }}
+func retrieve{{pascalize $modelName }}{{pascalize .Name }}Flags(depth int, m *{{if $modelPkg}}{{$modelPkg}}.{{ dropPackage $modelType }}{{else}}{{ $modelType }}{{end}}, cmdPrefix string, cmd *cobra.Command) (error, bool) {
+ if depth > maxDepth {
+ return nil, false
+ }
+ retAdded := false
+ {{ template "propertyretriever" . }}
+ return nil, retAdded
+}
+{{ end }} {{/*properties*/}}
+{{ end }} {{/*define*/}} \ No newline at end of file
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/client/client.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/client.gotmpl
new file mode 100644
index 000000000..3d01e9dcc
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/client.gotmpl
@@ -0,0 +1,127 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Name }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ "fmt"
+ "io"
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// New creates a new {{ humanize .Name }} API client.
+func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService {
+ return &Client{transport: transport, formats: formats}
+}
+
+/*
+Client {{ if .Summary }}{{ .Summary }}{{ if .Description }}
+
+{{ blockcomment .Description }}{{ end }}{{ else if .Description}}{{ blockcomment .Description }}{{ else }}for {{ humanize .Name }} API{{ end }}
+*/
+type Client struct {
+ transport runtime.ClientTransport
+ formats strfmt.Registry
+}
+
+// ClientOption is the option for Client methods
+type ClientOption func(*runtime.ClientOperation)
+
+// ClientService is the interface for Client methods
+type ClientService interface {
+ {{ range .Operations }}
+ {{ pascalize .Name }}(params *{{ pascalize .Name }}Params{{ if .Authorized }}, authInfo runtime.ClientAuthInfoWriter{{end}}{{ if .HasStreamingResponse }}, writer io.Writer{{ end }}, opts ...ClientOption) {{ if .SuccessResponse }}({{ range .SuccessResponses }}*{{ pascalize .Name }}, {{ end }}{{ end }}error{{ if .SuccessResponse }}){{ end }}
+ {{ end }}
+
+ SetTransport(transport runtime.ClientTransport)
+}
+
+{{ range .Operations }}
+/*
+ {{ pascalize .Name }} {{ if .Summary }}{{ pluralizeFirstWord (humanize .Summary) }}{{ if .Description }}
+
+ {{ blockcomment .Description }}{{ end }}{{ else if .Description}}{{ blockcomment .Description }}{{ else }}{{ humanize .Name }} API{{ end }}
+*/
+func (a *Client) {{ pascalize .Name }}(params *{{ pascalize .Name }}Params{{ if .Authorized }}, authInfo runtime.ClientAuthInfoWriter{{end}}{{ if .HasStreamingResponse }}, writer io.Writer{{ end }}, opts ...ClientOption) {{ if .SuccessResponse }}({{ range .SuccessResponses }}*{{ pascalize .Name }}, {{ end }}{{ end }}error{{ if .SuccessResponse }}){{ end }} {
+ // TODO: Validate the params before sending
+ if params == nil {
+ params = New{{ pascalize .Name }}Params()
+ }
+ op := &runtime.ClientOperation{
+ ID: {{ printf "%q" .Name }},
+ Method: {{ printf "%q" .Method }},
+ PathPattern: {{ printf "%q" .Path }},
+ ProducesMediaTypes: {{ printf "%#v" .ProducesMediaTypes }},
+ ConsumesMediaTypes: {{ printf "%#v" .ConsumesMediaTypes }},
+ Schemes: {{ printf "%#v" .Schemes }},
+ Params: params,
+ Reader: &{{ pascalize .Name }}Reader{formats: a.formats{{ if .HasStreamingResponse }}, writer: writer{{ end }}},{{ if .Authorized }}
+ AuthInfo: authInfo,{{ end}}
+ Context: params.Context,
+ Client: params.HTTPClient,
+ }
+ for _, opt := range opts {
+ opt(op)
+ }
+ {{ $length := len .SuccessResponses }}
+ {{ if .SuccessResponse }}result{{else}}_{{ end }}, err := a.transport.Submit(op)
+ if err != nil {
+ return {{ if .SuccessResponse }}{{ padSurround "nil" "nil" 0 $length }}, {{ end }}err
+ }
+ {{- if .SuccessResponse }}
+ {{- if eq $length 1 }}
+ success, ok := result.(*{{ pascalize .SuccessResponse.Name }})
+ if ok {
+ return success,nil
+ }
+ // unexpected success response
+ {{- if .DefaultResponse }}{{/* if a default response is provided, fill this and return an error */}}
+ unexpectedSuccess := result.(*{{ pascalize .DefaultResponse.Name }})
+ return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
+ {{- else }}
+ // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue
+ msg := fmt.Sprintf("unexpected success response for {{ .Name }}: API contract not enforced by server. Client expected to get an error, but got: %T", result)
+ panic(msg)
+ {{- end }}
+ {{- else }}{{/* several possible success responses */}}
+ switch value := result.(type) {
+ {{- range $i, $v := .SuccessResponses }}
+ case *{{ pascalize $v.Name }}:
+ return {{ padSurround "value" "nil" $i $length }}, nil
+ {{- end }}
+ }
+ {{- if .DefaultResponse }}{{/* if a default response is provided, fill this and return an error */}}
+ // unexpected success response
+ unexpectedSuccess := result.(*{{ pascalize .DefaultResponse.Name }})
+ return {{ padSurround "nil" "nil" 0 $length }}, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
+ {{- else }}
+ // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue
+ msg := fmt.Sprintf("unexpected success response for {{ $.Name }}: API contract not enforced by server. Client expected to get an error, but got: %T", result)
+ panic(msg)
+ {{- end }}
+ {{- end }}
+ {{- else }}
+ return nil
+ {{- end }}
+}
+{{- end }}
+
+// SetTransport changes the transport on the client
+func (a *Client) SetTransport(transport runtime.ClientTransport) {
+ a.transport = transport
+} \ No newline at end of file
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/client/facade.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/facade.gotmpl
new file mode 100644
index 000000000..287a75f92
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/facade.gotmpl
@@ -0,0 +1,129 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+
+import (
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ httptransport "github.com/go-openapi/runtime/client"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// Default {{ humanize .Name }} HTTP client.
+var Default = NewHTTPClient(nil)
+
+const (
+ // DefaultHost is the default Host
+ // found in Meta (info) section of spec file
+ DefaultHost string = {{ printf "%#v" .Host }}
+ // DefaultBasePath is the default BasePath
+ // found in Meta (info) section of spec file
+ DefaultBasePath string = {{ printf "%#v" .BasePath }}
+)
+
+// DefaultSchemes are the default schemes found in Meta (info) section of spec file
+var DefaultSchemes = {{ printf "%#v" .Schemes }}
+
+// NewHTTPClient creates a new {{ humanize .Name }} HTTP client.
+func NewHTTPClient(formats strfmt.Registry) *{{ pascalize .Name }} {
+ return NewHTTPClientWithConfig(formats, nil)
+}
+
+// NewHTTPClientWithConfig creates a new {{ humanize .Name }} HTTP client,
+// using a customizable transport config.
+func NewHTTPClientWithConfig(formats strfmt.Registry, cfg *TransportConfig) *{{ pascalize .Name }} {
+ // ensure nullable parameters have default
+ if cfg == nil {
+ cfg = DefaultTransportConfig()
+ }
+
+ // create transport and client
+ transport := httptransport.New(cfg.Host, cfg.BasePath, cfg.Schemes)
+ return New(transport, formats)
+}
+
+// New creates a new {{ humanize .Name }} client
+func New(transport runtime.ClientTransport, formats strfmt.Registry) *{{ pascalize .Name }} {
+ // ensure nullable parameters have default
+ if formats == nil {
+ formats = strfmt.Default
+ }
+
+ cli := new({{ pascalize .Name }})
+ cli.Transport = transport
+ {{- range .OperationGroups }}
+ cli.{{ pascalize .Name }} = {{ .PackageAlias }}.New(transport, formats)
+ {{- end }}
+ return cli
+}
+
+// DefaultTransportConfig creates a TransportConfig with the
+// default settings taken from the meta section of the spec file.
+func DefaultTransportConfig() *TransportConfig {
+ return &TransportConfig {
+ Host: DefaultHost,
+ BasePath: DefaultBasePath,
+ Schemes: DefaultSchemes,
+ }
+}
+
+// TransportConfig contains the transport related info,
+// found in the meta section of the spec file.
+type TransportConfig struct {
+ Host string
+ BasePath string
+ Schemes []string
+}
+
+// WithHost overrides the default host,
+// provided by the meta section of the spec file.
+func (cfg *TransportConfig) WithHost(host string) *TransportConfig {
+ cfg.Host = host
+ return cfg
+}
+
+// WithBasePath overrides the default basePath,
+// provided by the meta section of the spec file.
+func (cfg *TransportConfig) WithBasePath(basePath string) *TransportConfig {
+ cfg.BasePath = basePath
+ return cfg
+}
+
+// WithSchemes overrides the default schemes,
+// provided by the meta section of the spec file.
+func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig {
+ cfg.Schemes = schemes
+ return cfg
+}
+
+// {{ pascalize .Name }} is a client for {{ humanize .Name }}
+type {{ pascalize .Name }} struct {
+ {{ range .OperationGroups }}
+ {{ pascalize .Name }} {{ .PackageAlias }}.ClientService
+ {{ end }}
+ Transport runtime.ClientTransport
+}
+
+
+// SetTransport changes the transport on the client and all its subresources
+func (c *{{pascalize .Name}}) SetTransport(transport runtime.ClientTransport) {
+ c.Transport = transport
+ {{- range .OperationGroups }}
+ c.{{ pascalize .Name }}.SetTransport(transport)
+ {{- end }}
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/client/parameter.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/parameter.gotmpl
new file mode 100644
index 000000000..3546fa273
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/parameter.gotmpl
@@ -0,0 +1,406 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "time"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ cr "github.com/go-openapi/runtime/client"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// New{{ pascalize .Name }}Params creates a new {{ pascalize .Name }}Params object,
+// with the default timeout for this client.
+//
+// Default values are not hydrated, since defaults are normally applied by the API server side.
+//
+// To enforce default values in parameter, use SetDefaults or WithDefaults.
+func New{{ pascalize .Name }}Params() *{{ pascalize .Name }}Params {
+ return &{{ pascalize .Name}}Params{
+ {{ camelize .TimeoutName }}: cr.DefaultTimeout,
+ }
+}
+
+// New{{ pascalize .Name }}ParamsWithTimeout creates a new {{ pascalize .Name }}Params object
+// with the ability to set a timeout on a request.
+func New{{ pascalize .Name }}ParamsWithTimeout(timeout time.Duration) *{{ pascalize .Name }}Params {
+ return &{{ pascalize .Name}}Params{
+ {{ camelize .TimeoutName }}: timeout,
+ }
+}
+
+// New{{ pascalize .Name }}ParamsWithContext creates a new {{ pascalize .Name }}Params object
+// with the ability to set a context for a request.
+func New{{ pascalize .Name }}ParamsWithContext(ctx context.Context) *{{ pascalize .Name }}Params {
+ return &{{ pascalize .Name}}Params{
+ Context: ctx,
+ }
+}
+
+// New{{ pascalize .Name }}ParamsWithHTTPClient creates a new {{ pascalize .Name }}Params object
+// with the ability to set a custom HTTPClient for a request.
+func New{{ pascalize .Name }}ParamsWithHTTPClient(client *http.Client) *{{ pascalize .Name }}Params {
+ return &{{ pascalize .Name}}Params{
+ HTTPClient: client,
+ }
+}
+
+/* {{ pascalize .Name }}Params contains all the parameters to send to the API endpoint
+ for the {{ humanize .Name }} operation.
+
+ Typically these are written to a http.Request.
+*/
+type {{ pascalize .Name }}Params struct {
+ {{- range .Params }}
+ {{- if .Description }}
+
+ /* {{ pascalize .Name }}.
+
+ {{ blockcomment .Description }}
+ {{- if or .SwaggerFormat .Default }}
+ {{ print "" }}
+ {{- if .SwaggerFormat }}
+ Format: {{ .SwaggerFormat }}
+ {{- end }}
+ {{- if .Default }}
+ Default: {{ json .Default }}
+ {{- end }}
+ {{- end }}
+ */
+ {{- else }}
+
+ // {{ pascalize .Name }}.
+ {{- if or .SwaggerFormat .Default }}
+ //
+ {{- if .SwaggerFormat }}
+ // Format: {{ .SwaggerFormat }}
+ {{- end }}
+ {{- if .Default }}
+ // Default: {{ json .Default }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{ pascalize .ID }} {{ if and (not .IsArray) (not .IsMap) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) (or .IsNullable ) }}*{{ end }}{{ if not .IsFileParam }}{{ .GoType }}{{ else }}runtime.NamedReadCloser{{ end }}
+ {{- end }}
+
+ {{ camelize .TimeoutName }} time.Duration
+ Context context.Context
+ HTTPClient *http.Client
+}
+
+// WithDefaults hydrates default values in the {{ humanize .Name }} params (not the query body).
+//
+// All values with no default are reset to their zero value.
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) WithDefaults() *{{ pascalize .Name }}Params {
+ {{ .ReceiverName }}.SetDefaults()
+ return {{ .ReceiverName }}
+}
+
+// SetDefaults hydrates default values in the {{ humanize .Name }} params (not the query body).
+//
+// All values with no default are reset to their zero value.
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) SetDefaults() {
+{{- if .Params.HasSomeDefaults }}
+ var (
+ {{- range .Params }}
+ {{- if .HasDefault }}
+ {{- if .IsFileParam }}
+ // NOTE: no default supported for file parameter {{ .ID }}
+ {{- else if .IsStream }}
+ // NOTE: no default supported for stream parameter {{ .ID }}
+ {{- else if not .IsBodyParam }}
+ {{ template "simpleschemaDefaultsvar" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ )
+ {{- range .Params }}
+ {{- if and .HasDefault (not .IsFileParam) (not .IsStream) (not .IsBodyParam) }}
+ {{ template "simpleschemaDefaultsinit" . }}
+ {{- end }}
+ {{- end }}
+
+ val := {{ pascalize .Name }}Params{
+ {{- range .Params }}
+ {{- if and .HasDefault (not .IsFileParam) (not .IsStream) (not .IsBodyParam) }}
+ {{ pascalize .ID }}: {{ if and (not .IsArray) (not .IsMap) (not .HasDiscriminator) (or .IsNullable ) }}&{{ end }}{{ varname .ID }}Default,
+ {{- end }}
+ {{- end }}
+ }
+
+ val.{{ camelize .TimeoutName }} = {{ .ReceiverName }}.{{ camelize .TimeoutName }}
+ val.Context = {{ .ReceiverName }}.Context
+ val.HTTPClient = {{ .ReceiverName }}.HTTPClient
+ *{{ .ReceiverName }} = val
+{{- else }}
+ // no default values defined for this parameter
+{{- end }}
+}
+
+// With{{ pascalize .TimeoutName }} adds the timeout to the {{ humanize .Name }} params
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) With{{ pascalize .TimeoutName }}(timeout time.Duration) *{{ pascalize .Name }}Params {
+ {{ .ReceiverName }}.Set{{ pascalize .TimeoutName }}(timeout)
+ return {{ .ReceiverName }}
+}
+
+// Set{{ pascalize .TimeoutName }} adds the timeout to the {{ humanize .Name }} params
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) Set{{ pascalize .TimeoutName }}(timeout time.Duration) {
+ {{ .ReceiverName }}.{{ camelize .TimeoutName }} = timeout
+}
+
+// WithContext adds the context to the {{ humanize .Name }} params
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) WithContext(ctx context.Context) *{{ pascalize .Name }}Params {
+ {{ .ReceiverName }}.SetContext(ctx)
+ return {{ .ReceiverName }}
+}
+
+// SetContext adds the context to the {{ humanize .Name }} params
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) SetContext(ctx context.Context) {
+ {{ .ReceiverName }}.Context = ctx
+}
+
+// WithHTTPClient adds the HTTPClient to the {{ humanize .Name }} params
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) WithHTTPClient(client *http.Client) *{{ pascalize .Name }}Params {
+ {{ .ReceiverName }}.SetHTTPClient(client)
+ return {{ .ReceiverName }}
+}
+
+// SetHTTPClient adds the HTTPClient to the {{ humanize .Name }} params
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) SetHTTPClient(client *http.Client) {
+ {{ .ReceiverName }}.HTTPClient = client
+}
+
+{{- range .Params }}
+
+// With{{ pascalize .ID }} adds the {{ varname .Name }} to the {{ humanize $.Name }} params
+func ({{ $.ReceiverName }} *{{ pascalize $.Name }}Params) With{{ pascalize .ID }}({{ varname .Name }} {{ if and (not .IsArray) (not .IsMap) (not .HasDiscriminator) (not .IsStream) (or .IsNullable ) }}*{{ end }}{{ if not .IsFileParam }}{{ .GoType }}{{ else }}runtime.NamedReadCloser{{ end }}) *{{ pascalize $.Name }}Params {
+ {{ $.ReceiverName }}.Set{{ pascalize .ID }}({{ varname .Name }})
+ return {{ .ReceiverName }}
+}
+
+// Set{{ pascalize .ID }} adds the {{ camelize .Name }} to the {{ humanize $.Name }} params
+func ({{ $.ReceiverName }} *{{ pascalize $.Name }}Params) Set{{ pascalize .ID }}({{ varname .Name }} {{ if and (not .IsArray) (not .IsMap) (not .HasDiscriminator) (not .IsStream) (or .IsNullable ) }}*{{ end }}{{ if not .IsFileParam }}{{ .GoType }}{{ else }}runtime.NamedReadCloser{{ end }}) {
+ {{ $.ReceiverName }}.{{ pascalize .ID }} = {{ varname .Name }}
+}
+{{- end }}
+
+// WriteToRequest writes these params to a swagger request
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
+
+ if err := r.SetTimeout({{ .ReceiverName }}.{{ camelize .TimeoutName }}); err != nil {
+ return err
+ }
+ var res []error
+ {{- range .Params }}
+ {{- if not (or .IsArray .IsMap .IsBodyParam) }}
+ {{- if and .IsNullable (not .AllowEmptyValue) }}
+
+ if {{ .ValueExpression }} != nil {
+ {{- end}}
+
+ {{- if .IsQueryParam }}
+
+ // query param {{ .Name }}
+ {{- if .IsNullable }}
+ var qr{{ pascalize .Name }} {{ .GoType }}
+
+ if {{ .ValueExpression }} != nil {
+ qr{{ pascalize .Name }} = *{{ .ValueExpression }}
+ }
+ {{- else }}
+ qr{{ pascalize .Name }} := {{ .ValueExpression }}
+ {{- end}}
+ q{{ pascalize .Name}} := {{ if .Formatter }}{{ .Formatter }}(qr{{ pascalize .Name }}){{ else }}qr{{ pascalize .Name }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+ {{- if not .AllowEmptyValue }}
+ if q{{ pascalize .Name }} != "" {
+ {{- end }}
+
+ if err := r.SetQueryParam({{ printf "%q" .Name }}, q{{ pascalize .Name }}); err != nil {
+ return err
+ }
+ {{- if not .AllowEmptyValue }}
+ }
+ {{- end }}
+
+ {{- else if .IsPathParam }}
+
+ // path param {{ .Name }}
+ if err := r.SetPathParam({{ printf "%q" .Name }}, {{ if .Formatter }}{{ .Formatter }}({{ if .IsNullable }}*{{end}}{{ .ValueExpression }}){{ else }}{{ if and (not .IsCustomFormatter) .IsNullable }}*{{end}}{{ .ValueExpression }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}); err != nil {
+ return err
+ }
+
+ {{- else if .IsHeaderParam }}
+
+ // header param {{ .Name }}
+ if err := r.SetHeaderParam({{ printf "%q" .Name }}, {{ if .Formatter }}{{ .Formatter }}({{ if .IsNullable }}*{{end}}{{ .ValueExpression }}){{ else }}{{ if and (not .IsCustomFormatter) .IsNullable }}*{{end}}{{ .ValueExpression }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}); err != nil {
+ return err
+ }
+ {{- else if .IsFormParam }}
+ {{- if .IsFileParam }}
+ {{- if .IsNullable }}
+
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ // form file param {{ .Name }}
+ if err := r.SetFileParam({{ printf "%q" .Name }}, {{ .ValueExpression }}); err != nil {
+ return err
+ }
+ {{- if .IsNullable}}
+ }
+ {{- end }}
+ {{- else }}
+
+ // form param {{ .Name }}
+ {{- if .IsNullable }}
+ var fr{{ pascalize .Name }} {{ .GoType }}
+ if {{ .ValueExpression }} != nil {
+ fr{{ pascalize .Name }} = *{{ .ValueExpression }}
+ }
+ {{- else }}
+ fr{{ pascalize .Name }} := {{ .ValueExpression }}
+ {{- end}}
+ f{{ pascalize .Name}} := {{ if .Formatter }}{{ .Formatter }}(fr{{ pascalize .Name }}){{ else }}fr{{ pascalize .Name }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+ {{- if not .AllowEmptyValue }}
+ if f{{ pascalize .Name }} != "" {
+ {{- end }}
+ if err := r.SetFormParam({{ printf "%q" .Name }}, f{{ pascalize .Name }}); err != nil {
+ return err
+ }
+ {{- if not .AllowEmptyValue }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- if and .IsNullable (not .AllowEmptyValue) }}
+ }
+ {{- end }}
+ {{- else if .IsArray }}
+
+ {{- if not .IsBodyParam }}
+
+ if {{ .ValueExpression }} != nil {
+
+ {{- if .Child }}{{/* bind complex parameters (arrays and nested structures) */}}
+
+ // binding items for {{ .Name }}
+ joined{{ pascalize .Name }} := {{ .ReceiverName }}.bindParam{{ pascalize .Name }}(reg)
+ {{- else }}
+ values{{ pascalize .Name }} := {{ if and (not .IsArray) (not .IsStream) (not .IsMap) (.IsNullable) }}*{{end}}{{ .ValueExpression }}
+ joined{{ pascalize .Name}} := swag.JoinByFormat(values{{ pascalize .Name }}, "{{.CollectionFormat}}")
+ {{- end }}
+
+ {{- if .IsQueryParam }}
+
+ // query array param {{ .Name }}
+ if err := r.SetQueryParam({{ printf "%q" .Name }}, joined{{ pascalize .Name }}...); err != nil {
+ return err
+ }
+
+ {{- else if and .IsFormParam }}
+
+ // form array param {{ .Name }}
+ if err := r.SetFormParam({{ printf "%q" .Name }}, joined{{ pascalize .Name }}...); err != nil {
+ return err
+ }
+
+ {{- else if and .IsPathParam }}
+
+ // path array param {{ .Name }}
+ // SetPathParam does not support variadic arguments, since we used JoinByFormat
+ // we can send the first item in the array as it's all the items of the previous
+ // array joined together
+ if len(joined{{ pascalize .Name }}) > 0 {
+ if err := r.SetPathParam({{ printf "%q" .Name }}, joined{{ pascalize .Name }}[0]); err != nil {
+ return err
+ }
+ }
+
+ {{- else if .IsHeaderParam }}
+
+ // header array param {{ .Name }}
+ if len(joined{{ pascalize .Name }}) > 0 {
+ if err := r.SetHeaderParam({{ printf "%q" .Name }}, joined{{ pascalize .Name }}[0]); err != nil {
+ return err
+ }
+ }
+ {{- end }}
+ }
+ {{- end }}
+ {{- end }}
+
+ {{- if .IsBodyParam }}
+ {{- if or .Schema.IsInterface .Schema.IsStream (and .Schema.IsArray .Child) (and .Schema.IsMap .Child) (and .Schema.IsNullable (not .HasDiscriminator)) }}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ if err := r.SetBodyParam({{ .ValueExpression }}); err != nil {
+ return err
+ }
+ {{- if or .Schema.IsInterface .Schema.IsStream (and .Schema.IsArray .Child) (and .Schema.IsMap .Child) (and .Schema.IsNullable (not .HasDiscriminator)) }}
+ }
+ {{- end }}
+ {{- end }}
+
+ {{- end }}
+
+ if len(res) > 0 {
+ return errors.CompositeValidationError(res...)
+ }
+ return nil
+}
+
+{{- range .Params }}
+ {{- if and (not .IsBodyParam) (not .IsFileParam) .IsArray }}
+// bindParam{{ pascalize $.Name }} binds the parameter {{ .Name }}
+func ({{ .ReceiverName }} *{{ pascalize $.Name }}Params) bindParam{{ pascalize .Name }}(formats strfmt.Registry) []string {
+ {{ varname .Child.ValueExpression }}R := {{ if and (not .IsArray) (not .IsStream) (not .IsMap) (.IsNullable) }}*{{end}}{{ .ValueExpression }}
+
+ {{ template "sliceclientparambinder" . }}
+
+ return {{ varname .Child.ValueExpression }}S
+}
+ {{- end }}
+{{- end }}
+{{- define "sliceclientparambinder" }}
+ {{- if .IsArray }}
+ var {{ varname .Child.ValueExpression }}C []string
+ for _, {{ varname .Child.ValueExpression }}IR := range {{ varname .Child.ValueExpression }}R { // explode {{ .GoType }}
+ {{ template "sliceclientparambinder" .Child }}
+ {{ varname .Child.ValueExpression }}C = append({{ varname .Child.ValueExpression }}C, {{ varname .Child.ValueExpression }}IV)
+ }
+
+ // {{ .Child.ItemsDepth }}CollectionFormat: {{ printf "%q" .CollectionFormat }}
+ {{ varname .Child.ValueExpression }}S := swag.JoinByFormat({{ varname .Child.ValueExpression }}C, {{ printf "%q" .CollectionFormat }})
+ {{- if .Child.Parent }}{{/* NOTE: we cannot expect a "multi" CollectionFormat within an inner array */}}
+ {{ varname .Child.ValueExpression }}V := {{ varname .Child.ValueExpression }}S[0]
+ {{- end }}
+
+ {{- else }}
+ {{ varname .ValueExpression }}IV :=
+ {{- if .IsCustomFormatter }}
+ {{- print " " }}{{ varname .ValueExpression }}IR.String()
+ {{- else if eq .GoType "string" }}
+ {{- print " " }}{{ varname .ValueExpression }}IR
+ {{- else if .Formatter }}
+ {{- print " "}}{{ .Formatter }}({{ varname .ValueExpression }}IR)
+ {{- else }}
+ {{- print " " }}fmt.Sprintf("%v", {{ varname .ValueExpression }}IR)
+ {{- end }} // {{ .GoType }} as string
+ {{- end }}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/client/response.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/response.gotmpl
new file mode 100644
index 000000000..d62238540
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/response.gotmpl
@@ -0,0 +1,346 @@
+{{- define "clientresponse" }}
+// New{{ pascalize .Name }} creates a {{ pascalize .Name }} with default headers values
+func New{{ pascalize .Name }}({{ if eq .Code -1 }}code int{{ end }}{{ if .Schema }}{{ if and (eq .Code -1) .Schema.IsStream }}, {{end}}{{ if .Schema.IsStream }}writer io.Writer{{ end }}{{ end }}) *{{ pascalize .Name }} {
+ {{- if .Headers.HasSomeDefaults }}
+ var (
+ // initialize headers with default values
+ {{- range .Headers }}
+ {{- if .HasDefault }}
+ {{ template "simpleschemaDefaultsvar" . }}
+ {{- end }}
+ {{- end }}
+ )
+ {{- range .Headers }}
+ {{- if .HasDefault }}
+ {{ template "simpleschemaDefaultsinit" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ return &{{ pascalize .Name }}{
+ {{- if eq .Code -1 }}
+ _statusCode: code,
+ {{- end }}
+ {{ range .Headers }}
+ {{- if .HasDefault }}
+ {{ pascalize .Name}}: {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}&{{ end }}{{ varname .ID }}Default,
+ {{- end }}
+ {{- end }}
+ {{- if .Schema }}
+ {{- if .Schema.IsStream }}
+ Payload: writer,
+ {{- end }}
+ {{- end }}
+ }
+}
+
+/* {{ pascalize .Name}} describes a response with status code {{ .Code }}, with default header values.
+
+ {{ if .Description }}{{ blockcomment .Description }}{{else}}{{ pascalize .Name }} {{ humanize .Name }}{{end}}
+ */
+type {{ pascalize .Name }} struct {
+ {{- if eq .Code -1 }}
+ _statusCode int
+ {{- end }}
+ {{- range .Headers }}
+ {{- if .Description }}
+
+ /* {{ blockcomment .Description }}
+ {{- if or .SwaggerFormat .Default }}
+ {{ print "" }}
+ {{- if .SwaggerFormat }}
+ Format: {{ .SwaggerFormat }}
+ {{- end }}
+ {{- if .Default }}
+ Default: {{ json .Default }}
+ {{- end }}
+ {{- end }}
+ */
+ {{- end }}
+ {{ pascalize .Name }} {{ .GoType }}
+ {{- end }}
+ {{- if .Schema }}
+
+ Payload {{ if and (not .Schema.IsBaseType) (not .Schema.IsInterface) .Schema.IsComplexObject (not .Schema.IsStream) }}*{{ end }}{{ if (not .Schema.IsStream) }}{{ .Schema.GoType }}{{ else }}io.Writer{{end}}
+ {{- end }}
+}
+
+// IsSuccess returns true when this {{ humanize .Name }} response has a 2xx status code
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) IsSuccess() bool {
+ {{- if eq .Code -1 }}
+ return {{ .ReceiverName }}._statusCode/100 == 2
+ {{- else }}
+ return {{ and (ge .Code 200) (lt .Code 300) }}
+ {{- end }}
+}
+
+// IsRedirect returns true when this {{ humanize .Name }} response has a 3xx status code
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) IsRedirect() bool {
+ {{- if eq .Code -1 }}
+ return {{ .ReceiverName }}._statusCode/100 == 3
+ {{- else }}
+ return {{ and (ge .Code 300) (lt .Code 400) }}
+ {{- end }}
+}
+
+// IsClientError returns true when this {{ humanize .Name }} response has a 4xx status code
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) IsClientError() bool {
+ {{- if eq .Code -1 }}
+ return {{ .ReceiverName }}._statusCode/100 == 4
+ {{- else }}
+ return {{ and (ge .Code 400) (lt .Code 500) }}
+ {{- end }}
+}
+
+// IsServerError returns true when this {{ humanize .Name }} response has a 5xx status code
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) IsServerError() bool {
+ {{- if eq .Code -1 }}
+ return {{ .ReceiverName }}._statusCode/100 == 5
+ {{- else }}
+ return {{ and (ge .Code 500) (lt .Code 600) }}
+ {{- end }}
+}
+
+// IsCode returns true when this {{ humanize .Name }} response has a status code equal to that given
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) IsCode(code int) bool {
+ {{- if eq .Code -1 }}
+ return {{ .ReceiverName }}._statusCode == code
+ {{- else }}
+ return code == {{ .Code }}
+ {{- end }}
+}
+
+// Code gets the status code for the {{ humanize .Name }} response
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) Code() int {
+ {{- if eq .Code -1 }}
+ return {{ .ReceiverName }}._statusCode
+ {{- else }}
+ return {{ .Code }}
+ {{- end }}
+}
+
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) Error() string {
+ return fmt.Sprintf("[{{ upper .Method }} {{ .Path }}][%d] {{ if .Name }}{{ .Name }} {{ else }}unknown error {{ end }}{{ if .Schema }} %+v{{ end }}", {{ if eq .Code -1 }}{{ .ReceiverName }}._statusCode{{ else }}{{ .Code }}{{ end }}{{ if .Schema }}, o.Payload{{ end }})
+}
+
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) String() string {
+ return fmt.Sprintf("[{{ upper .Method }} {{ .Path }}][%d] {{ if .Name }}{{ .Name }} {{ else }}unknown response {{ end }}{{ if .Schema }} %+v{{ end }}", {{ if eq .Code -1 }}{{ .ReceiverName }}._statusCode{{ else }}{{ .Code }}{{ end }}{{ if .Schema }}, o.Payload{{ end }})
+}
+
+{{ if .Schema }}
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) GetPayload() {{ if and (not .Schema.IsBaseType) (not .Schema.IsInterface) .Schema.IsComplexObject (not .Schema.IsStream) }}*{{ end }}{{ if (not .Schema.IsStream) }}{{ .Schema.GoType }}{{ else }}io.Writer{{end}} {
+ return o.Payload
+}
+{{- end }}
+
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
+ {{- range .Headers }}
+
+ // hydrates response header {{.Name}}
+ hdr{{ pascalize .Name }} := response.GetHeader("{{ .Name }}")
+
+ if hdr{{ pascalize .Name }} != "" {
+ {{- if .Converter }}
+ val{{ camelize .Name }}, err := {{ .Converter }}(hdr{{ pascalize .Name }})
+ if err != nil {
+ return errors.InvalidType({{ .Path }}, "header", "{{ .GoType }}", hdr{{ pascalize .Name }})
+ }
+ {{ .ReceiverName }}.{{ pascalize .Name }} = val{{ camelize .Name }}
+ {{- else if .Child }}
+
+ // binding header items for {{ .Name }}
+ val{{ pascalize .Name }}, err := {{ .ReceiverName }}.bindHeader{{ pascalize .Name }}(hdr{{ pascalize .Name }}, formats)
+ if err != nil {
+ return err
+ }
+
+ {{ .ReceiverName }}.{{ pascalize .Name }} = val{{ pascalize .Name }}
+ {{- else if .IsCustomFormatter }}
+ val{{ camelize .Name }}, err := formats.Parse({{ printf "%q" .SwaggerFormat }}, hdr{{ pascalize .Name }})
+ if err != nil {
+ return errors.InvalidType({{ .Path }}, "header", "{{ .GoType }}", hdr{{ pascalize .Name }})
+ }
+ {{- if .IsNullable }}
+ v := (val{{ camelize .Name }}.({{ .GoType }}))
+ {{ .ReceiverName }}.{{ pascalize .Name }} = &v
+ {{- else }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = *(val{{ camelize .Name }}.(*{{ .GoType }}))
+ {{- end }}
+ {{- else }}
+ {{- if eq .GoType "string" }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = hdr{{ pascalize .Name }}
+ {{- else }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ .GoType }}(hdr{{ pascalize .Name }})
+ {{- end }}
+ {{- end }}
+ }
+ {{- end }}
+
+ {{- if .Schema }}
+ {{- if .Schema.IsBaseType }}
+
+ // response payload as interface type
+ payload, err := {{ toPackageName .ModelsPackage }}.Unmarshal{{ dropPackage .Schema.GoType }}{{ if .Schema.IsArray}}Slice{{ end }}(response.Body(), consumer)
+ if err != nil {
+ return err
+ }
+ {{ .ReceiverName }}.Payload = payload
+ {{- else if .Schema.IsComplexObject }}
+
+ {{ .ReceiverName }}.Payload = new({{ .Schema.GoType }})
+ {{- end }}
+ {{- if not .Schema.IsBaseType }}
+
+ // response payload
+ if err := consumer.Consume(response.Body(), {{ if not (or .Schema.IsComplexObject .Schema.IsStream) }}&{{ end}}{{ .ReceiverName }}.Payload); err != nil && err != io.EOF {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+
+ return nil
+}
+ {{- range .Headers }}
+ {{- if .Child }}
+
+// bindHeader{{ pascalize $.Name }} binds the response header {{ .Name }}
+func ({{ .ReceiverName }} *{{ pascalize $.Name }}) bindHeader{{ pascalize .Name }}(hdr string, formats strfmt.Registry) ({{ .GoType }}, error) {
+ {{ varname .Child.ValueExpression }}V := hdr
+
+ {{ template "sliceclientheaderbinder" . }}
+
+ return {{ varname .Child.ValueExpression }}C, nil
+}
+ {{- end }}
+ {{- end }}
+{{- end }}
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+
+import (
+ "io"
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// {{ pascalize .Name }}Reader is a Reader for the {{ pascalize .Name }} structure.
+type {{ pascalize .Name }}Reader struct {
+ formats strfmt.Registry
+{{- if .HasStreamingResponse }}
+ writer io.Writer
+{{- end }}
+}
+
+// ReadResponse reads a server response into the received {{ .ReceiverName }}.
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
+ {{- if .Responses}}
+ switch response.Code() {
+ {{- end }}
+ {{- range .Responses }}
+ case {{ .Code }}:
+ result := New{{ pascalize .Name }}({{ if .Schema }}{{ if .Schema.IsStream }}{{ $.ReceiverName }}.writer{{ end }}{{ end }})
+ if err := result.readResponse(response, consumer, {{ $.ReceiverName }}.formats); err != nil {
+ return nil, err
+ }
+ return {{ if .IsSuccess }}result, nil{{else}}nil, result{{ end }}
+ {{- end }}
+ {{- if .DefaultResponse }}
+ {{- with .DefaultResponse }}
+ {{- if $.Responses}}
+ default:
+ {{- end }}
+ result := New{{ pascalize .Name }}(response.Code(){{ if .Schema }}{{ if .Schema.IsStream }}, {{ $.ReceiverName }}.writer{{ end }}{{ end }})
+ if err := result.readResponse(response, consumer, {{ $.ReceiverName }}.formats); err != nil {
+ return nil, err
+ }
+ if response.Code() / 100 == 2 {
+ return result, nil
+ }
+ return nil, result
+ {{- end }}
+ {{- else }}
+ {{- if $.Responses}}
+ default:
+ {{- end }}
+ return nil, runtime.NewAPIError("[{{ upper .Method }} {{ .Path }}]{{ if .Name }} {{ .Name }}{{ end }}", response, response.Code())
+ {{- end }}
+ {{- if .Responses}}
+ }
+ {{- end }}
+}
+
+{{ range .Responses }}
+ {{ template "clientresponse" . }}
+{{ end }}
+{{ if .DefaultResponse }}
+ {{ template "clientresponse" .DefaultResponse }}
+{{ end }}
+
+{{ range .ExtraSchemas }}
+/*{{ pascalize .Name }} {{ template "docstring" . }}
+swagger:model {{ .Name }}
+*/
+ {{- template "schema" . }}
+{{- end }}
+
+{{- define "sliceclientheaderbinder" }}
+ {{- if .IsArray }}
+ var (
+ {{ varname .Child.ValueExpression }}C {{ .GoType }}
+ )
+ // {{ .Child.ItemsDepth }}CollectionFormat: {{ printf "%q" .CollectionFormat }}
+ {{ varname .Child.ValueExpression }}R := swag.SplitByFormat({{ varname .Child.ValueExpression }}V, {{ printf "%q" .CollectionFormat }})
+
+ for {{ if or .Child.IsCustomFormatter .Child.Converter }}{{ .IndexVar }}{{ else }}_{{ end }}, {{ varname .Child.ValueExpression }}IV := range {{ varname .Child.ValueExpression }}R {
+ {{ template "sliceclientheaderbinder" .Child }}
+ {{ varname .Child.ValueExpression }}C = append({{ varname .Child.ValueExpression }}C, {{ varname .Child.ValueExpression }}IC) // roll-up {{ .Child.GoType }} into {{ .GoType }}
+ }
+
+ {{- else }}
+ // convert split string to {{ .GoType }}
+ {{- if .IsCustomFormatter }}
+ val, err := formats.Parse({{ printf "%q" .SwaggerFormat }}, {{ varname .ValueExpression }}IV)
+ if err != nil {
+ return nil, errors.InvalidType({{ .Path }}, "header{{ .ItemsDepth }}", "{{ .GoType }}", {{ varname .ValueExpression }}IV)
+ }
+ {{- if .IsNullable }}
+ {{ varname .ValueExpression }}IC := (&val).(*{{ .GoType }})
+ {{- else }}
+ {{ varname .ValueExpression }}IC := val.({{ .GoType }})
+ {{- end }}
+ {{- else if .Converter }}
+ val, err := {{- print " "}}{{ .Converter }}({{ varname .ValueExpression }}IV)
+ if err != nil {
+ return nil, errors.InvalidType({{ .Path }}, "header{{ .ItemsDepth }}", "{{ .GoType }}", {{ varname .ValueExpression }}IV)
+ }
+ {{- if .IsNullable }}
+ {{ varname .ValueExpression }}IC := &val
+ {{- else }}
+ {{ varname .ValueExpression }}IC := val
+ {{- end }}
+ {{- else }}
+ {{ varname .ValueExpression }}IC :=
+ {{- if eq .GoType "string" }}
+ {{- print " " }}{{ varname .ValueExpression }}IV
+ {{- else }}
+ {{- print " " }}fmt.Sprintf("%v", {{ varname .ValueExpression }}IV)
+ {{- end }} // string as {{ .GoType }}
+ {{- end }}
+ {{- end }}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/README.md b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/README.md
new file mode 100644
index 000000000..1d36d66f5
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/README.md
@@ -0,0 +1,311 @@
+# swagger
+
+In Stratoscale, we really like the idea of API-first services, and we also really like Go.
+We saw the go-swagger library, and thought that most of it can really help us. Generating code from
+swagger files is a big problem with a lot of corner cases, and go-swagger is doing a great job.
+
+The one thing that we felt was missing is customization of the server to run with our design principles:
+
+* Custom `main` function
+* Dependency injection
+* Limited scopes with unit testing.
+
+Also:
+
+* Adding your functions to the generated `configure_swagger_*.go` seems to be a burden.
+* Lack of an interface that the service implements.
+* Complicated and custom http clients and runtime.
+
+These are the changes that these contributed templates provide:
+
+## Server
+
+### The new `restapi` package exposes interfaces
+
+* Those interfaces can be implemented by the developer and are the business logic of the service.
+* The implementation of those is extensible.
+* The implementation is separated from the generated code.
+
+### The `restapi` returns an `http.Handler`
+
+The `restapi.Handler` (see [example](./example/restapi/configure_swagger_petstore.go)) function returns
+a standard `http.Handler`
+
+* Given objects that implement the business logic, we can create a simple HTTP handler.
+* This handler is a standard Go http.Handler, so we can now use any other middleware, library, or framework
+  that supports it.
+* This handler is standard, so we understand it better.
+
+## Client
+
+* The new client package exposes interfaces, so functions in our code can receive those
+ interfaces which can be mocked for testing.
+* The new client has a config that gets an `*url.URL` to customize the endpoint.
+* The new client has a config that gets an `http.RoundTripper` to customize client with libraries, middleware or
+ frameworks that support the standard library's objects.
+
+# Example Walk-Through
+
+In the [example package](https://github.com/Stratoscale/swagger/tree/master/example) you'll find generated code and usage of the pet-store
+[swagger file](./example/swagger.yaml).
+
+* The `restapi`, `models` and `client` are auto-generated by the stratoscale/swagger docker file.
+* The `internal` package was manually added and contains the server's business logic.
+* The `main.go` file is the entrypoint and contains initializations and dependency injections of the project.
+
+## Server
+
+### [restapi](https://github.com/Stratoscale/swagger/tree/master/example/restapi)
+
+This package is autogenerated and contains the server routing and parameters parsing.
+
+The modified version contains `restapi.PetAPI` and `restapi.StoreAPI` which were auto generated.
+
+```go
+// PetAPI
+type PetAPI interface {
+ PetCreate(ctx context.Context, params pet.PetCreateParams) middleware.Responder
+ PetDelete(ctx context.Context, params pet.PetDeleteParams) middleware.Responder
+ PetGet(ctx context.Context, params pet.PetGetParams) middleware.Responder
+ PetList(ctx context.Context, params pet.PetListParams) middleware.Responder
+ PetUpdate(ctx context.Context, params pet.PetUpdateParams) middleware.Responder
+}
+
+//go:generate mockery -name StoreAPI -inpkg
+
+// StoreAPI
+type StoreAPI interface {
+ InventoryGet(ctx context.Context, params store.InventoryGetParams) middleware.Responder
+ OrderCreate(ctx context.Context, params store.OrderCreateParams) middleware.Responder
+ // OrderDelete is For valid response try integer IDs with positive integer value. Negative or non-integer values will generate API errors
+ OrderDelete(ctx context.Context, params store.OrderDeleteParams) middleware.Responder
+ // OrderGet is For valid response try integer IDs with value >= 1 and <= 10. Other values will generated exceptions
+ OrderGet(ctx context.Context, params store.OrderGetParams) middleware.Responder
+}
+```
+
+Each function matches an `operationId` in the swagger file and they are grouped according to
+the operation `tags`.
+
+There is also a `restapi.Config`:
+
+```go
+// Config is configuration for Handler
+type Config struct {
+ PetAPI
+ StoreAPI
+ Logger func(string, ...interface{})
+ // InnerMiddleware is for the handler executors. These do not apply to the swagger.json document.
+ // The middleware executes after routing but before authentication, binding and validation
+ InnerMiddleware func(http.Handler) http.Handler
+}
+```
+
+This config is auto generated and contains all the declared interfaces above.
+It is used to initiate an http.Handler with the `Handler` function:
+
+```go
+// Handler returns an http.Handler given the handler configuration
+// It mounts all the business logic implementers in the right routing.
+func Handler(c Config) (http.Handler, error) {
+ ...
+```
+
+Let's look how we use this generated code to build our server.
+
+### [internal](https://github.com/Stratoscale/swagger/tree/master/example/internal)
+
+The `internal` package is **not** auto generated and contains the business logic of our server.
+We can see two structs that implement the `restapi.PetAPI` and `restapi.StoreAPI` interfaces,
+needed to make our server work.
+
+When adding or removing functions from our REST API, we can just add or remove functions to those
+business logic units. We can also create new logical units when they are added to our REST API.
+
+### [main.go](./example/main.go)
+
+The main function is pretty straightforward. We initiate our business logic units.
+Then create a config for our rest API. We then create a standard `http.Handler` which we can
+update with middleware, test with `httptest`, or to use with other standard tools.
+The last piece is to run the handler with `http.ListenAndServe` or to use it with an `http.Server` -
+it is all very customizable.
+
+```go
+func main() {
+ // Initiate business logic implementers.
+ // This is the main function, so here the implementers' dependencies can be
+ // injected, such as database, parameters from environment variables, or different
+ // clients for different APIs.
+ p := internal.Pet{}
+ s := internal.Store{}
+
+ // Initiate the http handler, with the objects that are implementing the business logic.
+ h, err := restapi.Handler(restapi.Config{
+ PetAPI: &p,
+ StoreAPI: &s,
+ Logger: log.Printf,
+ })
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Run the standard http server
+ log.Fatal(http.ListenAndServe(":8080", h))
+}
+```
+
+## Client
+
+The client code is in the [client package](https://github.com/Stratoscale/swagger/tree/master/example/client) and is autogenerated.
+
+To create a new client we use the `client.Config` struct:
+
+```go
+type Config struct {
+ // URL is the base URL of the upstream server
+ URL *url.URL
+ // Transport is an inner transport for the client
+ Transport http.RoundTripper
+}
+```
+
+This enables us to use a custom server endpoint or custom client middleware, easily, with the
+standard components, and with any library that accepts them.
+
+The client is then generated with the New method:
+
+```go
+// New creates a new swagger petstore HTTP client.
+func New(c Config) *SwaggerPetstore { ... }
+```
+
+This method returns an object that has two important fields:
+
+```go
+type SwaggerPetstore {
+ ...
+ Pet *pet.Client
+ Store *store.Client
+}
+```
+
+Those fields are objects which implement the interfaces declared in the [pet](./example/client/pet) and
+[store](./example/client/store) packages:
+
+For example:
+
+```go
+// API is the interface of the pet client
+type API interface {
+ // PetCreate adds a new pet to the store
+ PetCreate(ctx context.Context, params *PetCreateParams) (*PetCreateCreated, error)
+ // PetDelete deletes a pet
+ PetDelete(ctx context.Context, params *PetDeleteParams) (*PetDeleteNoContent, error)
+ // PetGet gets pet by it s ID
+ PetGet(ctx context.Context, params *PetGetParams) (*PetGetOK, error)
+ // PetList lists pets
+ PetList(ctx context.Context, params *PetListParams) (*PetListOK, error)
+ // PetUpdate updates an existing pet
+ PetUpdate(ctx context.Context, params *PetUpdateParams) (*PetUpdateCreated, error)
+}
+```
+
+They are very similar to the server interfaces, and can be used by consumers of those APIs
+(instead of using the actual client or the `*Pet` struct)
+
+# Authentication
+
+Authenticating and policy enforcement of the application is done in several stages, described below.
+
+## Define security in swagger.yaml
+
+Add to the root of the swagger.yaml the security and security definitions sections.
+
+```yaml
+securityDefinitions:
+ token:
+ type: apiKey
+ in: header
+ name: Cookie
+
+security:
+ - token: []
+```
+
+The securityDefinitions section defines different security types that your application can handle.
+The supported types by go-swagger are:
+* `apiKey` - a token that should be processed.
+* `oauth2` - token and scopes that should be processed.
+* and `basic` - user/password that should be processed.
+
+Here we defined an apiKey, that is passed through the Cookie header.
+
+The `security` section defines the default security enforcement for the application. You can select
+different securityDefinitions, as the keys, and apply "scopes" as the values. Those default definitions
+can be overridden in each route by a section with the same name:
+
+```yaml
+paths:
+ /pets:
+ post:
+ [...]
+ security:
+ - token: [admin]
+```
+
+Here we overrode the token scope in the POST /pets URL so that only an admin can use this API.
+
+Let's see how we can use this functionality.
+
+## Writing Security Handlers
+
+Once we created a security definition named "token", a function called "AuthToken" was added to the `restapi.Config`:
+
+```go
+type Config struct {
+ ...
+ // AuthToken Applies when the "Cookie" header is set
+ AuthToken func(token string) (interface{}, error)
+}
+```
+
+This function gets the content of the Cookie header, and should return an `interface{}` and `error`.
+The `interface{}` is the object that should represent the user that performed the request; it should
+be nil to return an unauthorized 401 HTTP response. If the returned `error` is not nil, an HTTP 500
+internal server error will be returned.
+
+The returned object, will be stored in the request context under the `restapi.AuthKey` key.
+
+There is another function that we should know about, in the `restapi.Config` struct:
+
+```go
+type Config struct {
+ ...
+ // Authorizer is used to authorize a request after the Auth function was called using the "Auth*" functions
+ // and the principal was stored in the context in the "AuthKey" context value.
+ Authorizer func(*http.Request) error
+}
+```
+
+This one is a custom defined function that gets the request and can return an error.
+If the returned error is not nil, a 403 HTTP error will be returned to the client - here the policy
+enforcement comes into play.
+There are two things that this function should be aware of:
+
+1. The user - it can retrieve the user information from the context: `ctx.Value(restapi.AuthKey).(MyUserType)`.
+ Usually, a server will have a function for extracting this user information and returns a concrete
+ type which could be used by all the routes.
+2. The route - it can retrieve the route using the go-swagger function: `middleware.MatchedRouteFrom(*http.Request)`.
+ So no need to parse URL and test the request method.
+ This route struct contains the route information. If for example, we want to check the scopes that were
+ defined for the current route in the swagger.yaml we can use the code below:
+
+```go
+for _, auth := range route.Authenticators {
+ for scopeName, scopeValues := range auth.Scopes {
+ for _, scopeValue := range scopeValues {
+ ...
+ }
+ }
+}
+```
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/client.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/client.gotmpl
new file mode 100644
index 000000000..3398815ec
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/client.gotmpl
@@ -0,0 +1,111 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Name }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ "fmt"
+ "net/http"
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/validate"
+
+ strfmt "github.com/go-openapi/strfmt"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+//go:generate mockery --name API --keeptree --with-expecter --case underscore
+
+// API is the interface of the {{ humanize .Name }} client
+type API interface {
+{{ range .Operations -}}
+/*
+{{ pascalize .Name }} {{ if .Summary }}{{ pluralizeFirstWord (humanize .Summary) }}{{ if .Description }}
+
+{{ blockcomment .Description }}{{ end }}{{ else if .Description}}{{ blockcomment .Description }}{{ else }}{{ humanize .Name }} API{{ end -}}
+*/
+ {{ pascalize .Name }}(ctx context.Context, params *{{ pascalize .Name }}Params{{ if .HasStreamingResponse }}, writer io.Writer{{ end }}) {{ if .SuccessResponse }}({{ range .SuccessResponses }}*{{ pascalize .Name }}, {{ end }}{{ end }}error{{ if .SuccessResponse }}){{ end }}
+{{ end -}}
+}
+
+// New creates a new {{ humanize .Name }} API client.
+func New(transport runtime.ClientTransport, formats strfmt.Registry, authInfo runtime.ClientAuthInfoWriter) *Client {
+ return &Client{
+ transport: transport,
+ formats: formats,
+ authInfo: authInfo,
+ }
+}
+
+/*
+Client {{ if .Summary }}{{ .Summary }}{{ if .Description }}
+
+{{ blockcomment .Description }}{{ end }}{{ else if .Description}}{{ blockcomment .Description }}{{ else }}for {{ humanize .Name }} API{{ end }}
+*/
+type Client struct {
+ transport runtime.ClientTransport
+ formats strfmt.Registry
+ authInfo runtime.ClientAuthInfoWriter
+}
+
+{{ range .Operations -}}
+/*
+{{ pascalize .Name }} {{ if .Summary }}{{ pluralizeFirstWord (humanize .Summary) }}{{ if .Description }}
+
+{{ blockcomment .Description }}{{ end }}{{ else if .Description}}{{ blockcomment .Description }}{{ else }}{{ humanize .Name }} API{{ end }}
+*/
+func (a *Client) {{ pascalize .Name }}(ctx context.Context, params *{{ pascalize .Name }}Params{{ if .HasStreamingResponse }}, writer io.Writer{{ end }}) {{ if .SuccessResponse }}({{ range .SuccessResponses }}*{{ pascalize .Name }}, {{ end }}{{ end }}error{{ if .SuccessResponse }}){{ end }} {
+ {{ $length := len .SuccessResponses }}
+ {{ $success := .SuccessResponses }}
+ {{ if .Responses }}result{{else}}_{{end}}, err := a.transport.Submit(&runtime.ClientOperation{
+ ID: {{ printf "%q" .Name }},
+ Method: {{ printf "%q" .Method }},
+ PathPattern: {{ printf "%q" .Path }},
+ ProducesMediaTypes: {{ printf "%#v" .ProducesMediaTypes }},
+ ConsumesMediaTypes: {{ printf "%#v" .ConsumesMediaTypes }},
+ Schemes: {{ printf "%#v" .Schemes }},
+ Params: params,
+ Reader: &{{ pascalize .Name }}Reader{formats: a.formats{{ if .HasStreamingResponse }}, writer: writer{{ end }}},
+ {{ if .Authorized -}}
+ AuthInfo: a.authInfo,
+ {{ end -}}
+ Context: ctx,
+ Client: params.HTTPClient,
+ })
+ if err != nil {
+ return {{ if $success }}{{ padSurround "nil" "nil" 0 $length }}, {{ end }}err
+ }
+ {{- if .Responses }}
+ switch value := result.(type) {
+ {{- range $i, $v := .Responses }}
+ case *{{ pascalize $v.Name }}:
+ {{- if $v.IsSuccess }}
+ return {{ if $success }}{{ padSurround "value" "nil" $i $length }},{{ end }}nil
+ {{- else }}
+ return {{ if $success }}{{ padSurround "nil" "nil" 0 $length }},{{ end }}runtime.NewAPIError("unsuccessful response", value, value.Code())
+ {{- end }}
+ {{- end }}
+ }
+ {{- if .DefaultResponse }}
+ // unexpected success response
+ unexpectedSuccess := result.(*{{ pascalize .DefaultResponse.Name }})
+ return {{ if $success }}{{ padSurround "nil" "nil" 0 $length }}, {{ end }}runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
+ {{- else }}
+ // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue
+ msg := fmt.Sprintf("unexpected success response for {{ .Name }}: API contract not enforced by server. Client expected to get an error, but got: %T", result)
+ panic(msg)
+ {{- end }}
+ {{- else }}
+ return nil
+ {{- end }}
+}
+{{ end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/facade.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/facade.gotmpl
new file mode 100644
index 000000000..1d658978b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/facade.gotmpl
@@ -0,0 +1,83 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+
+import (
+ "net/url"
+ "net/http"
+
+ rtclient "github.com/go-openapi/runtime/client"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/strfmt"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+const (
+ // DefaultHost is the default Host
+ // found in Meta (info) section of spec file
+ DefaultHost string = {{ printf "%#v" .Host }}
+ // DefaultBasePath is the default BasePath
+ // found in Meta (info) section of spec file
+ DefaultBasePath string = {{ printf "%#v" .BasePath }}
+)
+
+// DefaultSchemes are the default schemes found in Meta (info) section of spec file
+var DefaultSchemes = {{ printf "%#v" .Schemes }}
+
+type Config struct {
+ // URL is the base URL of the upstream server
+ URL *url.URL
+ // Transport is an inner transport for the client
+ Transport http.RoundTripper
+ // AuthInfo is for authentication
+ AuthInfo runtime.ClientAuthInfoWriter
+}
+
+// New creates a new {{ humanize .Name }} HTTP client.
+func New(c Config) *{{ pascalize .Name }} {
+ var (
+ host = DefaultHost
+ basePath = DefaultBasePath
+ schemes = DefaultSchemes
+ )
+
+ if c.URL != nil {
+ host = c.URL.Host
+ basePath = c.URL.Path
+ schemes = []string{c.URL.Scheme}
+ }
+
+ transport := rtclient.New(host, basePath, schemes)
+ if c.Transport != nil {
+ transport.Transport = c.Transport
+ }
+
+ cli := new({{ pascalize .Name }})
+ cli.Transport = transport
+ {{ range .OperationGroups -}}
+ cli.{{ pascalize .Name }} = {{ .PackageAlias }}.New(transport, strfmt.Default, c.AuthInfo)
+ {{ end -}}
+
+ return cli
+}
+
+// {{ pascalize .Name }} is a client for {{ humanize .Name }}
+type {{ pascalize .Name }} struct {
+ {{ range .OperationGroups -}}
+ {{ pascalize .Name }} {{ .PackageAlias }}.API
+ {{ end -}}
+ Transport runtime.ClientTransport
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/configureapi.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/configureapi.gotmpl
new file mode 100644
index 000000000..eaee9701f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/configureapi.gotmpl
@@ -0,0 +1,222 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .APIPackage }}
+
+import (
+ "context"
+ "crypto/tls"
+ "net/http"
+ "log"
+ "fmt"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/runtime/middleware"
+ "github.com/go-openapi/runtime/security"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+{{ $package := .Package }}
+
+type contextKey string
+
+const AuthKey contextKey = "Auth"
+
+{{ range .OperationGroups -}}
+//go:generate mockery -name {{ pascalize .Name}}API -inpkg
+
+/* {{ pascalize .Name }}API {{ .Description }} */
+type {{ pascalize .Name }}API interface {
+{{ range .Operations -}}
+ {{ if .Summary -}}
+ /* {{ pascalize .Name }} {{ .Summary }} */
+ {{ else if .Description -}}
+ /* {{ pascalize .Name }} {{ .Description }} */
+ {{ end -}}
+ {{ pascalize .Name }}(ctx context.Context, params {{.Package}}.{{ pascalize .Name }}Params) middleware.Responder
+
+{{ end -}}
+}
+{{ end }}
+
+// Config is configuration for Handler
+type Config struct {
+ {{ range .OperationGroups -}}
+ {{ pascalize .Name }}API
+ {{ end -}}
+ Logger func(string, ...interface{})
+ // InnerMiddleware is for the handler executors. These do not apply to the swagger.json document.
+ // The middleware executes after routing but before authentication, binding and validation
+ InnerMiddleware func(http.Handler) http.Handler
+
+ // Authorizer is used to authorize a request after the Auth function was called using the "Auth*" functions
+ // and the principal was stored in the context in the "AuthKey" context value.
+ Authorizer func(*http.Request) error
+
+ {{ range .SecurityDefinitions -}}
+ {{ if .IsBasicAuth -}}
+ // Auth{{ pascalize .ID }} for basic authentication
+ Auth{{ pascalize .ID }} func(user string, pass string) ({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}, error)
+ {{ end -}}
+ {{ if .IsAPIKeyAuth -}}
+ // Auth{{ pascalize .ID }} Applies when the "{{ .Name }}" {{ .Source }} is set
+ Auth{{ pascalize .ID }} func(token string) ({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}, error)
+ {{ end }}
+ {{ if .IsOAuth2 -}}
+ // Auth{{ pascalize .ID }} For OAuth2 authentication
+ Auth{{ pascalize .ID }} func(token string, scopes []string) ({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}, error)
+ {{ end -}}
+ {{ end -}}
+
+    // Authenticator to use for all APIKey authentication
+    APIKeyAuthenticator func(string, string, security.TokenAuthentication) runtime.Authenticator
+    // Authenticator to use for all Basic authentication
+    BasicAuthenticator func(security.UserPassAuthentication) runtime.Authenticator
+    // Authenticator to use for all Bearer authentication
+    BearerAuthenticator func(string, security.ScopedTokenAuthentication) runtime.Authenticator
+
+ {{ range .Consumes -}}
+ {{ if .Implementation -}}
+ // {{ pascalize .Name }}Consumer is a {{ .Name }} consumer that will replace the default if not nil.
+ {{ pascalize .Name }}Consumer runtime.Consumer
+ {{ end -}}
+ {{ end -}}
+}
+
+// Handler returns an http.Handler given the handler configuration
+// It mounts all the business logic implementers in the right routing.
+func Handler(c Config) (http.Handler, error) {
+ h, _, err := HandlerAPI(c)
+ return h, err
+}
+
+// HandlerAPI returns an http.Handler given the handler configuration
+// and the corresponding *{{ pascalize .Name }} instance.
+// It mounts all the business logic implementers in the right routing.
+func HandlerAPI(c Config) (http.Handler, *{{.Package}}.{{ pascalize .Name }}API, error) {
+ spec, err := loads.Analyzed(swaggerCopy(SwaggerJSON), "")
+ if err != nil {
+ return nil, nil, fmt.Errorf("analyze swagger: %v", err)
+ }
+ api := {{.Package}}.New{{ pascalize .Name }}API(spec)
+ api.ServeError = errors.ServeError
+ api.Logger = c.Logger
+
+ if c.APIKeyAuthenticator != nil {
+ api.APIKeyAuthenticator = c.APIKeyAuthenticator
+ }
+ if c.BasicAuthenticator != nil {
+ api.BasicAuthenticator = c.BasicAuthenticator
+ }
+ if c.BearerAuthenticator != nil {
+ api.BearerAuthenticator = c.BearerAuthenticator
+ }
+
+ {{ range .Consumes -}}
+ if c.{{ pascalize .Name }}Consumer != nil {
+ api.{{ pascalize .Name }}Consumer = c.{{ pascalize .Name }}Consumer
+ } else {
+ {{ if .Implementation -}}
+ api.{{ pascalize .Name }}Consumer = {{ .Implementation }}
+ {{ else }}
+ api.{{ pascalize .Name }}Consumer = runtime.ConsumerFunc(func(r io.Reader, target interface{}) error {
+ return errors.NotImplemented("{{.Name}} consumer has not yet been implemented")
+ })
+ {{ end -}}
+ }
+ {{ end -}}
+ {{ range .Produces -}}
+ {{ if .Implementation -}}
+ api.{{ pascalize .Name }}Producer = {{ .Implementation }}
+ {{ else -}}
+ api.{{ pascalize .Name }}Producer = runtime.ProducerFunc(func(w io.Writer, data interface{}) error {
+ return errors.NotImplemented("{{.Name}} producer has not yet been implemented")
+ })
+ {{ end -}}
+ {{ end -}}
+
+ {{ range .SecurityDefinitions -}}
+ {{ if .IsBasicAuth -}}
+ api.{{ pascalize .ID }}Auth = func(user string, pass string) ({{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ if c.Auth{{ pascalize .ID }} == nil {
+ {{- if eq .Principal "interface{}" }}
+ return "", nil
+ {{- else }}
+ panic("you specified a custom principal type, but did not provide the authenticator to provide this")
+ {{- end }}
+ }
+ return c.Auth{{ pascalize .ID }}(user, pass)
+ }
+ {{ end -}}
+ {{ if .IsAPIKeyAuth -}}
+ api.{{ pascalize .ID }}Auth = func(token string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ if c.Auth{{ pascalize .ID }} == nil {
+ {{- if eq .Principal "interface{}" }}
+ return token, nil
+ {{- else }}
+ panic("you specified a custom principal type, but did not provide the authenticator to provide this")
+ {{- end }}
+ }
+ return c.Auth{{ pascalize .ID }}(token)
+ }
+ {{ end }}
+ {{ if .IsOAuth2 -}}
+ api.{{ pascalize .ID }}Auth = func(token string, scopes []string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ if c.Auth{{ pascalize .ID }} == nil {
+ {{- if eq .Principal "interface{}" }}
+ return token, nil
+ {{- else }}
+ panic("you specified a custom principal type, but did not provide the authenticator to provide this")
+ {{- end }}
+ }
+ return c.Auth{{ pascalize .ID }}(token, scopes)
+ }
+ {{ end -}}
+ {{ end -}}
+
+ {{ if .SecurityDefinitions -}}
+ api.APIAuthorizer = authorizer(c.Authorizer)
+ {{ end -}}
+
+ {{ range .Operations -}}
+ api.{{if ne .Package $package}}{{pascalize .Package}}{{end}}{{ pascalize .Name }}Handler =
+ {{- .PackageAlias }}.{{ pascalize .Name }}HandlerFunc(func(params {{.PackageAlias}}.{{ pascalize .Name }}Params{{if .Authorized}}, principal {{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}{{end}}) middleware.Responder {
+ ctx := params.HTTPRequest.Context()
+ {{ if .Authorized -}}
+ ctx = storeAuth(ctx, principal)
+ {{ end -}}
+ return c.{{pascalize .Package}}API.{{pascalize .Name}}(ctx, params)
+ })
+ {{ end -}}
+
+ api.ServerShutdown = func() { }
+ return api.Serve(c.InnerMiddleware), api, nil
+}
+
+// swaggerCopy copies the swagger json to prevent data races in runtime
+func swaggerCopy(orig json.RawMessage) json.RawMessage {
+ c := make(json.RawMessage, len(orig))
+ copy(c, orig)
+ return c
+}
+
+// authorizer is a helper function to implement the runtime.Authorizer interface.
+type authorizer func(*http.Request) error
+
+func (a authorizer) Authorize(req *http.Request, principal interface{}) error {
+ if a == nil {
+ return nil
+ }
+ ctx := storeAuth(req.Context(), principal)
+ return a(req.WithContext(ctx))
+}
+
+func storeAuth(ctx context.Context, principal interface{}) context.Context {
+ return context.WithValue(ctx, AuthKey, principal)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/server.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/server.gotmpl
new file mode 100644
index 000000000..0330309c5
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/server.gotmpl
@@ -0,0 +1,9 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}{{/* optional copyright comment taken from the spec */}}
+
+
+package {{ .APIPackage }}
+
+// this file is intentionally empty. Otherwise go-swagger will generate a server which we don't want
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/docstring.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/docstring.gotmpl
new file mode 100644
index 000000000..8e7108be1
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/docstring.gotmpl
@@ -0,0 +1,25 @@
+{{ define "docstring" }}{{/* renders the leading doc comment for a generated type: title/description (or humanized name), property-count bounds, and example */}}
+ {{- if .Title }}
+ {{- comment .Title }}
+ {{- if .Description }}
+//
+// {{ comment .Description }}
+ {{- end }}
+ {{- else if .Description}}
+ {{- comment .Description }}
+ {{- else }}
+ {{- humanize .Name }}
+ {{- end }}
+ {{- if or .MinProperties .MaxProperties }}{{/* FIX: was "or .MinProperties .MinProperties" — a schema with only MaxProperties set never rendered its bound */}}
+//
+ {{- if .MinProperties }}
+// Min Properties: {{ .MinProperties }}
+ {{- end }}
+ {{- if .MaxProperties }}
+// Max Properties: {{ .MaxProperties }}
+ {{- end }}
+ {{- end }}
+ {{- if .Example }}
+// Example: {{ print .Example }}
+ {{- end }}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/header.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/header.gotmpl
new file mode 100644
index 000000000..a60cae1ea
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/header.gotmpl
@@ -0,0 +1,20 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+{{ if .Copyright -}}
+// {{ comment .Copyright }}
+{{- end }}{{/* optional copyright comment taken from the spec */}}
+
+package {{.Package}}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ "github.com/go-openapi/strfmt"
+{{- if .DefaultImports }}
+ {{ imports .DefaultImports }}
+{{- end }}
+{{- if .Imports }}
+ {{ imports .Imports }}
+{{- end }}
+)
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/markdown/docs.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/markdown/docs.gotmpl
new file mode 100644
index 000000000..8b7c6b3dd
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/markdown/docs.gotmpl
@@ -0,0 +1,527 @@
+{{- define "externalDoc" }}{{/* renders external documentation as a markdown blockquote, linked when a URL is present */}}
+ {{- with .ExternalDocs }}
+ {{- if .URL }}
+ {{- if .Description }}
+> [{{ trimSpace .Description }}]({{ .URL }})
+ {{- else }}
+> [Read more]({{ .URL }})
+ {{- end }}
+ {{- else }}{{/* no URL: plain description only */}}
+> {{ trimSpace .Description }}
+ {{- end }}
+ {{- end }}
+{{- end }}{{/* externalDoc */}}
+
+{{- define "docParam" }}{{/* renders a parameter with simple schema */}}
+| {{ .Name }} | `{{ .Location }}` | {{ paramDocType . }} | `{{ .GoType }}` | {{ if .CollectionFormat }}`{{ docCollectionFormat .CollectionFormat .Child }}`{{ end }} | {{ if .Required }}✓{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }} | {{ trimSpace .Description }} |{{/* one markdown table row per parameter */}}
+{{- end }}{{/* docParam */}}
+
+{{- define "docModelSchema" }}{{/* renders a schema */}}
+ {{- if .IsArray }}
+ {{- if .IsAliased }}
+ [{{- dropPackage .GoType }}](#{{ dasherize (dropPackage .GoType) -}})
+ {{- else if .Items }}
+ {{- if and .Items.IsPrimitive (not .Items.IsAliased) -}}
+ {{- schemaDocType . -}}
+ {{- else -}}
+ [][{{- dropPackage .Items.GoType }}](#{{ dasherize (dropPackage .Items.GoType) -}})
+ {{- end -}}
+ {{- else -}}
+ []any{{ printf " " -}}
+ {{- end -}}
+ {{- else if and .IsMap (not .IsAdditionalProperties) -}}
+ {{- if .IsAliased -}}
+ [{{- dropPackage .GoType }}](#{{ dasherize (dropPackage .GoType) -}})
+ {{- else if .ElemType }}
+ {{- if and .ElemType.IsPrimitive (not .ElemType.IsAliased) (not .ElemType.IsInterface) -}}
+ {{ schemaDocMapType . -}}
+ {{- else if .ElemType.IsInterface -}}
+ map of any{{ printf " " -}}
+ {{- else -}}
+ map of [{{- dropPackage .ElemType.GoType }}](#{{ dasherize (dropPackage .ElemType.GoType) -}})
+ {{- end -}}
+ {{- else -}}
+ map of any{{ printf " " -}}
+ {{- end -}}
+ {{- else if and .IsAliased .IsPrimitive (not .IsSuperAlias) -}}
+| Name | Type | Go type | Default | Description | Example |
+|------|------|---------| ------- |-------------|---------|
+| {{ .Name }} | {{ schemaDocType . }}| {{ .AliasedType }} | {{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} |
+{{ printf "\n" }}
+ {{- else if or (and .IsAliased (not (.IsAdditionalProperties))) (and .IsComplexObject (not .Properties) (not .AllOf)) -}}
+[{{- dropPackage .GoType }}](#{{ dasherize (dropPackage .GoType) -}})
+ {{- else if and .IsInterface (not .IsAliased) (not .IsMap) -}}
+any
+ {{- else -}}
+ {{- range .AllOf }}
+ {{- if .IsAnonymous }}
+* inlined member (*{{ .Name }}*)
+
+{{ template "docModelSchema" . }}
+ {{- else if or .IsComplexObject .IsPrimitive }}
+* composed type [{{- dropPackage .GoType }}](#{{ dasherize (dropPackage .GoType) -}})
+ {{- else }}
+* {{ template "docModelSchema" . }}
+ {{- end }}
+ {{- end }}
+ {{- if .Properties }}{{/* object/tuple members rendered as a markdown table */}}
+
+**{{ if .IsTuple }}Tuple members{{ else }}Properties{{ end }}**
+
+| Name | Type | Go type | Required | Default | Description | Example |
+|------|------|---------|:--------:| ------- |-------------|---------|
+ {{- range .Properties }}
+| {{ .Name }} | {{ template "docSchemaSimple" . }}| `{{ .GoType }}` | {{ if .Required }}✓{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} |
+ {{- end }}
+{{ printf "\n" }}
+ {{- end }}
+ {{- if .HasAdditionalProperties }}{{/* map-like extra properties of the object */}}
+
+**Additional Properties**
+ {{- with .AdditionalProperties }}
+ {{- if .IsInterface }}
+
+any
+ {{- else if .IsPrimitive }}
+
+| Type | Go type | Default | Description | Example |
+|------|---------| ------- |-------------|---------|
+| {{ template "docSchemaSimple" . }} | `{{ .GoType }}` |{{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} |
+ {{- else }}
+
+{{ template "docModelSchema" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- if and .IsTuple .HasAdditionalItems }}{{/* variadic tail of a tuple */}}
+ {{- with .AdditionalItems }}
+
+**Additional Items**
+ {{- if .IsInterface }}
+
+any
+ {{- else if .IsPrimitive }}
+
+| Type | Go type | Default | Description | Example |
+|------|---------| ------- |-------------|---------|
+| {{ template "docSchemaSimple" . }} | `{{ .GoType }}` |{{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} |
+ {{- else }}
+
+{{ template "docModelSchema" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end -}}
+{{- end }}{{/* docModelSchema */}}
+
+{{- define "docModel" }}{{/* renders a definition */}}
+ {{- with .Description }}
+> {{ .}}
+ {{- end }}
+ {{- if .ExternalDocs }}
+{{ template "externalDoc" . }}
+ {{- end }}
+ {{ if or .Description .ExternalDocs }}
+{{ printf "\n" }}
+ {{- end }}
+
+{{ template "docModelSchema" .}}
+{{- end }}{{/* docModel */}}
+
+{{- define "docSchemaSimple" }}{{/* renders a simple property: alias link, array/map element type, primitive, or link to a named type */}}
+ {{- if .IsAliased -}}
+ [{{- dropPackage .GoType }}](#{{ dasherize (dropPackage .GoType) -}})
+ {{- else if .IsArray }}
+ {{- if .Items }}
+ {{- if and .Items.IsPrimitive (not .Items.IsAliased) -}}
+ {{- schemaDocType . -}}
+ {{- else -}}
+ [][{{- dropPackage .Items.GoType }}](#{{ dasherize (dropPackage .Items.GoType) -}})
+ {{- end -}}
+ {{- else -}}
+ []any{{ printf " " -}}
+ {{- end -}}
+ {{- else if .IsMap -}}
+ {{- if .ElemType }}
+ {{- if and .ElemType.IsPrimitive (not .ElemType.IsAliased) (not .ElemType.IsInterface) -}}
+ {{ schemaDocMapType . -}}
+ {{- else if .ElemType.IsInterface -}}
+ map of any{{ printf " " -}}
+ {{- else -}}
+ map of [{{- dropPackage .ElemType.GoType }}](#{{ dasherize (dropPackage .ElemType.GoType) -}})
+ {{- end -}}
+ {{- else -}}
+ map of any{{ printf " " -}}
+ {{- end -}}
+ {{- else if .IsPrimitive -}}
+ {{- schemaDocType . -}}
+ {{- else -}}
+ [{{- dropPackage .GoType }}](#{{ dasherize (dropPackage .GoType) -}})
+ {{- end -}}
+{{- end }}{{/* docSchemaSimple */}}
+
+{{- define "docModelBodyParam" }}{{/* layout for body param schema */}}
+| {{ .Name }} | `body` | {{ template "docSchemaSimple" .Schema }} | `{{ .Schema.GoType }}` | | {{ if .Required }}✓{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} |{{/* one table row for the (single) body parameter */}}
+{{- end }}{{/* docModelBodyParam */}}
+
+{{- define "docHeaders" }}{{/* renders response headers */}}
+ {{- if .Headers }}
+| Name | Type | Go type | Separator | Default | Description |
+|------|------|---------|-----------|---------|-------------|
+ {{- range .Headers }}
+| {{ .Name }} | {{ headerDocType . }} | `{{ .GoType }}` | {{ if .CollectionFormat }}`{{ docCollectionFormat .CollectionFormat .Child }}`{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }} | {{ trimSpace .Description }} |
+ {{- end }}
+ {{- end }}
+{{- end }}{{/* docHeaders */}}
+
+{{/* spec top-level information block */}}
+{{- if .Info }}
+ {{- with .Info.Title }}
+# {{ . }}
+ {{- end }}
+
+ {{- with .Info.Description }}
+{{ . }}
+ {{- end }}
+ {{ template "externalDoc" . }}
+
+ {{- if or .Info.Version .Info.License .Info.Contact .Info.TermsOfService }}
+
+## Informations{{/* NOTE(review): heading kept verbatim; "Information" reads better but changing it alters generated docs output */}}
+ {{- end }}
+
+ {{- with .Info.Version }}
+
+### Version
+
+{{ . }}
+ {{- end }}
+
+ {{- with .Info.License }}
+
+### License
+
+{{ if .Name }}[{{ .Name }}]({{ end}}{{ .URL }}{{ if .Name }}){{ end }}
+ {{- end }}
+
+ {{- with .Info.Contact }}
+
+### Contact
+
+{{ .Name }} {{ .Email }} {{ .URL }}
+ {{- end }}
+
+ {{- with .Info.TermsOfService }}
+
+### Terms Of Service
+
+{{ . }}
+ {{- end }}
+{{- else }}
+ {{ template "externalDoc" . }}
+{{- end }}{{/* end .Info */}}
+
+{{- if .Tags }}
+
+## Tags{{/* one anchored heading per spec tag, linked to its external docs when present */}}
+ {{- range .Tags }}
+
+ ### <span id="tag-{{ dasherize .Name }}"></span>{{ if .ExternalDocs }}[{{ .Name }}]({{ .ExternalDocs.URL }}{{ if .ExternalDocs.Description }} {{ printf "%q" .ExternalDocs.Description }}{{ end }}){{ else }}{{ .Name }}{{ end }}
+ {{- if .Description }}
+
+{{ .Description }}
+ {{- end }}
+ {{- end }}
+{{- end }}{{/* end .Tags */}}
+
+{{- if or .Schemes .Consumes .Produces }}
+
+## Content negotiation
+{{- end }}
+{{- if .Schemes }}
+
+### URI Schemes
+ {{- range .Schemes }}
+ * {{ . }}
+ {{- end }}
+ {{- range .ExtraSchemes }}
+ * {{ . }}
+ {{- end }}
+{{- end }}{{/* end .Schemes */}}
+
+{{- if .Consumes }}
+
+### Consumes
+ {{- range .Consumes }}
+ {{- range .AllSerializers }}
+ * {{ .MediaType }}
+ {{- end }}
+ {{- end }}
+{{- end }}{{/* end .Consumes (was mislabeled "end .Schemes") */}}
+
+{{- if .Produces }}
+
+### Produces
+ {{- range .Produces }}
+ {{- range .AllSerializers }}
+ * {{ .MediaType }}
+ {{- end }}
+ {{- end }}
+{{- end }}{{/* end .Produces */}}
+
+{{- if or .SecurityDefinitions .SecurityRequirements }}
+
+## Access control
+{{- end }}
+{{- if .SecurityDefinitions }}
+
+### Security Schemes
+ {{- range .SecurityDefinitions }}
+
+#### {{ .ID }}{{ if .Source }} ({{ .Source }}{{ with .Name }}: {{ . }}{{ end }}){{ end }}
+
+{{ .Description }}
+
+ {{- with .Type }}
+
+> **Type**: {{ . }}
+ {{- end }}
+ {{- if .IsOAuth2}}{{/* OAuth2-specific details: flow, endpoints and scopes */}}
+ {{- with .Flow }}
+>
+> **Flow**: {{ . }}
+ {{- end }}
+ {{- with .AuthorizationURL }}
+>
+> **Authorization URL**: {{ . }}
+ {{- end }}
+ {{- with .TokenURL }}
+>
+> **Token URL**: {{ . }}
+ {{- end }}
+ {{ if .ScopesDesc }}
+
+##### Scopes
+
+Name | Description
+-----|-------------
+ {{- range .ScopesDesc }}
+{{ .Name }} | {{ .Description }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{- end }}{{/* end .SecurityDefinitions */}}
+
+{{- if .SecurityRequirements }}
+
+### Security Requirements
+
+ {{- range .SecurityRequirements }}
+ * {{ .Name }}{{ if .Scopes }}: {{ range $idx, $scope := .Scopes }}{{ if gt $idx 0 }}, {{ end }}{{ $scope }}{{ end }}
+ {{- end }}
+ {{- end }}
+{{- end }}{{/* end .SecurityRequirements */}}
+
+## All endpoints{{/* an index of all API endpoints */}}
+
+{{- $alltags := .Tags }}
+{{- range .OperationGroups }}
+
+### {{ .PackageAlias }}
+ {{- $pkg := .PackageAlias }}
+ {{- range $alltags }}{{/* look up the tag matching this operation group to show its external docs */}}
+ {{- if eq .Name $pkg }}
+
+ {{ template "externalDoc" . }}
+ {{- end }}
+ {{- end }}
+
+| Method | URI | Name | Summary |
+|---------|---------|--------|---------|
+ {{- range .Operations }}
+| {{ upper .Method }} | {{ joinPath .BasePath .Path }} | [{{ humanize .Name }}](#{{ dasherize .Name }}) | {{ .Summary }} |
+ {{- end }}
+ {{ printf "\n" }}
+{{- end }}{{/* end .OperationGroups */}}
+
+## Paths{{/* all paths to operations */}}
+
+{{- range .Operations }}
+ {{- $opname := .Name }}
+
+### <span id="{{ dasherize .Name }}"></span> {{ if .Summary }}{{ trimSpace .Summary }}{{ else }}{{ humanize .Name }}{{ end }} (*{{ .Name }}*)
+
+```
+{{ upper .Method }} {{ joinPath .BasePath .Path }}
+```
+ {{- with .Description }}
+
+{{ . }}
+ {{- end }}
+
+ {{- with .ExternalDocs }}
+
+> {{ if .URL }}[Read more]({{ .URL }} "{{ .Description }}"){{ end }}
+ {{- end }}
+
+ {{- if or (gt (len .SchemeOverrides) 0) (gt (len .ExtraSchemeOverrides) 0) }}{{/* per-operation scheme overrides */}}
+
+#### URI Schemes
+
+ {{- range .SchemeOverrides }}
+ * {{ . }}
+ {{- end }}
+ {{- range .ExtraSchemeOverrides }}
+ * {{ . }}
+ {{- end }}
+ {{- end }}
+
+ {{- if .Consumes }}
+
+#### Consumes
+
+ {{- range .Consumes }}
+ * {{ . }}
+ {{- end }}
+ {{- end }}
+
+ {{- if .Produces }}
+
+#### Produces
+
+ {{- range .Produces }}
+ * {{ . }}
+ {{- end }}
+ {{- end }}
+
+ {{- if .SecurityRequirements }}
+
+#### Security Requirements
+ {{- range .SecurityRequirements }}
+ * {{ .Name }}{{ if .Scopes }}: {{ range $idx, $scope := .Scopes }}{{ if gt $idx 0 }}, {{ end }}{{ $scope }}{{ end }}{{ end }}
+ {{- end }}
+ {{- end }}
+
+ {{- if .Params }}{{/* parameters grouped by location, then the body param */}}
+
+#### Parameters
+
+| Name | Source | Type | Go type | Separator | Required | Default | Description |
+|------|--------|------|---------|-----------| :------: |---------|-------------|
+{{- range .PathParams }}{{ template "docParam" . }}{{ end }}
+{{- range .HeaderParams }}{{ template "docParam" . }}{{ end }}
+{{- range .QueryParams }}{{ template "docParam" . }}{{ end }}
+{{- range .FormParams }}{{ template "docParam" . }}{{ end }}
+{{- range .Params }}
+{{- if .IsBodyParam }}
+{{- template "docModelBodyParam" . }}
+ {{- end }}
+{{- end }}
+{{- end }}{{/* end .Params */}}
+
+#### All responses
+| Code | Status | Description | Has headers | Schema |
+|------|--------|-------------|:-----------:|--------|
+{{- range .Responses }}
+| [{{.Code}}](#{{ dasherize $opname }}-{{ .Code }}) | {{ httpStatus .Code }} | {{ trimSpace .Description }} | {{ if .Headers }}✓{{ end }} | [schema](#{{ dasherize $opname }}-{{ .Code }}-schema) |
+{{- end }}
+{{- with .DefaultResponse }}
+| [default](#{{ dasherize $opname }}-default) | | {{ trimSpace .Description }} | {{ if .Headers }}✓{{ end }} | [schema](#{{ dasherize $opname }}-default-schema) |
+{{- end }}
+
+#### Responses
+{{ range .Responses }}
+
+##### <span id="{{ dasherize $opname }}-{{ .Code }}"></span> {{.Code}}{{ if .Description }} - {{ trimSpace .Description }}{{ end }}
+Status: {{ httpStatus .Code }}
+
+###### <span id="{{ dasherize $opname }}-{{ .Code }}-schema"></span> Schema
+ {{- if .Schema }}
+ {{ template "docModel" .Schema }}
+ {{- end }}
+
+ {{- if .Examples }}
+
+###### Examples
+ {{ range .Examples }}
+**{{ .MediaType }}**
+```json
+{{ prettyjson .Example }}
+```
+ {{- end }}
+ {{- end }}
+
+ {{- if .Headers }}
+
+###### Response headers
+{{ template "docHeaders" . }}
+ {{- end }}
+{{- end }}{{/* end .Responses */}}
+
+{{- with .DefaultResponse }}
+
+##### <span id="{{ dasherize $opname }}-default"></span> Default Response
+{{ trimSpace .Description }}
+
+###### <span id="{{ dasherize $opname }}-default-schema"></span> Schema
+ {{- if .Schema }}
+{{ template "docModel" .Schema }}
+ {{- else }}
+empty schema
+ {{- end }}
+
+ {{- if .Examples }}
+
+###### Examples
+ {{ range .Examples }}
+**{{ .MediaType }}**
+```json
+{{ .Example }}
+```
+ {{- end }}
+ {{- end }}
+
+ {{- if .Headers }}
+
+###### Response headers
+{{ template "docHeaders" . }}
+ {{- end }}
+{{- end }}{{/* end .DefaultResponse */}}
+
+ {{- if .ExtraSchemas }}
+
+###### Inlined models
+ {{- range .ExtraSchemas }}
+ {{- if ne .Name "" }}
+
+**<span id="{{ dasherize .Name }}"></span> {{ .Name }}**
+
+{{ template "docModel" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}{{/* end .ExtraSchemas */}}
+
+{{- end }}{{/* end .Operations */}}
+
+## Models{{/* all named definitions, each with its hoisted inlined models */}}
+
+{{- range .Models }}
+
+### <span id="{{ dasherize .Name }}"></span> {{ .Name }}
+
+{{ template "docModel" . }}
+
+ {{- if .ExtraSchemas }}
+
+#### Inlined models
+ {{- range .ExtraSchemas }}
+ {{- if ne .Name "" }}
+
+**<span id="{{ dasherize .Name }}"></span> {{ .Name }}**
+
+{{ template "docModel" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{- end }}{{/* end .Models */}}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/model.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/model.gotmpl
new file mode 100644
index 000000000..e107a1ee1
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/model.gotmpl
@@ -0,0 +1,27 @@
+{{ template "header" . }}
+{{- if .IncludeModel }}{{/* the main model for this definition */}}
+ {{- if .IsExported }}
+// {{ pascalize .Name }} {{ template "docstring" . }}
+ {{- template "annotations" . }}
+ {{- end }}
+ {{- template "schema" . }}
+{{- end }}
+
+{{ range .ExtraSchemas }}{{/* inlined schemas hoisted to named types */}}
+ {{- if .IncludeModel }}
+ {{- if .IsExported }}
+// {{ pascalize .Name }} {{ template "docstring" . }}
+ {{- template "annotations" . }}
+ {{- end }}
+ {{- template "schema" . }}
+ {{- end }}
+{{- end }}
+{{- define "annotations" }}{{/* annotations to generate spec from source */}}
+ {{- if not .IsBaseType }}
+//
+// swagger:model {{ .Name }}
+ {{- else }}
+//
+// swagger:discriminator {{ .Name }} {{ .DiscriminatorField }}
+ {{- end }}
+{{- end }}{{/* annotations */}}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/schema.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/schema.gotmpl
new file mode 100644
index 000000000..39339d728
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/schema.gotmpl
@@ -0,0 +1,131 @@
+{{- if and .IsBaseType .IsExported (not .IsSuperAlias) }}{{/* top-level dispatch: polymorphic base, super-alias, embedded, or regular schema */}}
+ {{- template "schemaPolymorphic" . }}
+{{- else if .IsSuperAlias }}
+ type {{ pascalize .Name }} {{ template "typeSchemaType" . }}{{/* For types declared as $ref on some other type, just declare the type as a golang _aliased_ type, e.g. type A = B. No method shall be redeclared. */}}
+ {{- if .IsBaseType }}
+ {{ template "baseTypeSerializer" . }}{{/* When the alias redeclares a polymorphic type, define factory methods with this alias. */}}
+ {{- end }}
+{{- else if .IsEmbedded }}
+ {{- template "schemaEmbedded" . }}
+{{- else }}
+ {{- if or .IsComplexObject .IsTuple .IsAdditionalProperties }}{{/* TODO(fred): handle case of subtype inheriting from base type with AdditionalProperties, issue #2220 */}}
+ {{ if .Name }}type {{ if not .IsExported }}{{ .Name }}{{ else }}{{ pascalize .Name }}{{ end }}{{ end }} {{ template "schemaBody" . }}
+ {{- range .Properties }}
+ {{- if .IsBaseType }}
+ // {{ pascalize .Name}} gets the {{ humanize .Name }} of this base type{{/* all properties which are of a base type propagate its interface */}}
+ func ({{ $.ReceiverName}} *{{ pascalize $.Name}}) {{ pascalize .Name}}() {{ template "schemaType" . }}{
+ {{- if eq $.DiscriminatorField .Name }}
+ return {{ printf "%q" $.DiscriminatorValue }}
+ {{- else }}
+ return {{ $.ReceiverName }}.{{camelize .Name}}Field
+ {{- end }}
+ }
+
+ // Set{{ pascalize .Name}} sets the {{ humanize .Name }} of this base type
+ func ({{ $.ReceiverName}} *{{ pascalize $.Name}}) Set{{ pascalize .Name}}(val {{ template "schemaType" . }}) {
+ {{- if ne $.DiscriminatorField .Name }}
+ {{ $.ReceiverName }}.{{camelize .Name}}Field = val
+ {{- end }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- if .Default }}{{/* TODO(fred) - issue #2189 */}}
+ func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(b []byte) error {
+ type {{ pascalize .Name }}Alias {{ pascalize .Name }}
+ var t {{ pascalize .Name }}Alias
+ if err := json.Unmarshal([]byte({{printf "%q" (json .Default)}}), &t); err != nil {
+ return err
+ }
+ if err := json.Unmarshal(b, &t); err != nil {
+ return err
+ }
+ *{{.ReceiverName}} = {{ pascalize .Name }}(t)
+ return nil
+ }
+ {{- end }}
+ {{- else }}
+ type {{ pascalize .Name }} {{ template "typeSchemaType" . }}
+ {{- end }}
+ {{- if (and .IsPrimitive .IsAliased .IsCustomFormatter (not (stringContains .Zero "(\""))) }}
+ {{ template "aliasedSerializer" . }}
+ {{- end }}
+ {{- if .IsSubType }}{{/* subtypes re-expose the base type's getters/setters */}}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{- if .IsBaseType }}
+
+ // {{ pascalize .Name}} gets the {{ humanize .Name }} of this subtype
+ func ({{$.ReceiverName}} *{{ pascalize $.Name}}) {{ pascalize .Name}}() {{ template "schemaType" . }}{
+ {{- if eq $.DiscriminatorField .Name }}
+ return {{ printf "%q" $.DiscriminatorValue }}
+ {{- else }}
+ return {{ $.ReceiverName }}.{{camelize .Name}}Field
+ {{- end }}
+ }
+
+ // Set{{ pascalize .Name}} sets the {{ humanize .Name }} of this subtype
+ func ({{$.ReceiverName}} *{{ pascalize $.Name}}) Set{{ pascalize .Name}}(val {{ template "schemaType" . }}) {
+ {{- if ne $.DiscriminatorField .Name }}
+ {{ $.ReceiverName }}.{{camelize .Name}}Field = val
+ {{- end }}
+ }
+ {{- end }}
+ {{- end }}{{/* TODO(fred): handle AdditionalProperties in base type */}}
+ {{- end }}
+ {{ template "mapOrSliceGetter" . }}
+ {{- end }}
+ {{ template "schemaSerializer" . }}
+{{- end }}
+{{- if and .IncludeValidator (not .IsSuperAlias) (not .IsEmbedded) }}{{/* aliased types type A = B do not redefine methods */}}
+ {{- if and (not (or .IsInterface .IsStream)) (or .Required .HasValidations .HasBaseType) }}
+ {{- if (eq .SwaggerType "string") }}{{/* Enum factory for enums for which we generate const (atm, only strings)*/}}
+ {{- if .Enum }}
+
+func New{{ pascalize .Name }}(value {{ .GoType }}) *{{ .GoType }} {
+ return &value
+}
+
+// Pointer returns a pointer to a freshly-allocated {{ .GoType }}.
+func ({{ .ReceiverName }} {{ .GoType }}) Pointer() *{{ .GoType }} {
+ return &{{ .ReceiverName }}
+}
+ {{- end }}
+ {{- end }}
+ {{ template "schemavalidator" . }}
+ {{- else if not (or .IsInterface .IsStream) }}
+// Validate validates this {{ humanize .Name }}{{/* this schema implements the runtime.Validatable interface but has no validations to check */}}
+func ({{.ReceiverName}} {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ if or (not .IsExported) .Discriminates }}{{ camelize .Name }}{{ else }}{{ pascalize .Name }}{{ end }}) Validate(formats strfmt.Registry) error {
+ return nil
+}
+ {{- else }}{{/* {{ .Name }} does not implement the runtime.Validatable interface: noop */}}
+ {{- end }}
+ {{- if and (not (or .IsInterface .IsStream)) (or .HasContextValidations) }}
+ {{ template "schemacontextvalidator" . }}
+ {{- else if not (or .IsInterface .IsStream) }}
+// ContextValidate validates this {{ humanize .Name }} based on context it is used {{/* this schema implements the runtime.ContextValidatable interface but has no validations to check */}}
+func ({{.ReceiverName}} {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ if or (not .IsExported) .Discriminates }}{{ camelize .Name }}{{ else }}{{ pascalize .Name }}{{ end }}) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
+ return nil
+}
+ {{- else }}{{/* {{ .Name }} does not implement the runtime.ContextValidatable interface: noop (FIX: comment previously said Validatable) */}}
+ {{- end }}
+{{- end }}
+{{- if .WantsMarshalBinary }}
+ {{ template "marshalBinarySerializer" . }}
+{{- end }}
+{{- define "mapOrSliceGetter" }}{{/* signature for AdditionalProperties and AdditionalItems getter funcs */}}
+ {{- if not .IsBaseType }}
+ {{- if .HasAdditionalProperties }}
+ {{- with .AdditionalProperties }}
+ // {{- template "docstring" . }}{{- template "propertyValidationDocString" . }}
+ {{ pascalize .Name }}() map[string]{{ template "schemaType" . }}
+ {{- end }}
+ {{- end }}
+ {{- with .AdditionalItems }}
+ // {{- template "docstring" . }}{{- template "propertyValidationDocString" . }}
+ {{ pascalize .Name }}() []{{ template "schemaType" . }}
+ {{- end }}
+ {{- else }}
+ // AdditionalProperties in base type should be handled just like regular properties{{/* TODO(fred): add full support for AdditionalProperties in base type */}}
+ // At this moment, the base type property is pushed down to the subtype
+ {{- end }}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/schemabody.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemabody.gotmpl
new file mode 100644
index 000000000..947e8c01b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemabody.gotmpl
@@ -0,0 +1,330 @@
+{{ define "schemaBody" }}struct {
+ {{ range .AllOf }}
+ {{ if or (and $.IsSubType .IsBaseType .IsExported) .IsAnonymous }}
+ {{ range .Properties }}
+ {{ if ne $.DiscriminatorField .Name }}
+ {{ if or (not $.IsExported) (and $.IsSubType .IsBaseType) }}
+ {{ if $.IsTuple }}
+ {{ template "privtuplefield" . }}
+ {{ else }}
+ {{template "privstructfield" . }}
+ {{ end }}
+ {{ else }}
+ {{ if $.IsTuple }}
+ {{ template "tuplefield" . }}
+ {{ else }}
+ {{template "structfield" . }}
+ {{ end }}
+ {{ end}}
+ {{ end }}
+ {{ end }}
+ {{- if .HasAdditionalProperties }}
+ {{- if .AdditionalProperties }}
+ // {{ template "docstring" .AdditionalProperties }}
+ {{- template "propertyValidationDocString" .AdditionalProperties}}
+ {{- if and .IsExported (not .IsSubType) }}
+ {{ pascalize .AdditionalProperties.Name }}
+ {{- else if or (not .AdditionalProperties.IsExported) (.AdditionalProperties.IsBaseType) }}
+ {{ camelize .AdditionalProperties.Name }}Field
+ {{- else }}
+ {{ .AdditionalProperties.Name }}
+ {{- end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{- end }}
+ {{- end }}
+ {{- if .AdditionalItems }}
+ // {{ template "docstring" .AdditionalItems }}
+ {{- template "propertyValidationDocString" .AdditionalItems}}
+ {{- if and .IsExported (not $.IsSubType) }}{{/* TODO(fred): make sure inherited AdditionalItems are camelized */}}
+ {{ pascalize .AdditionalItems.Name }}
+ {{- else }}
+ {{ .AdditionalItems.Name }}
+ {{- end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{- end }}
+ {{ else }}{{/* named type composition */}}
+ {{ if not (and $.IsBaseType .IsExported) }}{{ .GoType }}{{ end }}
+ {{ end }}
+ {{ end }}
+ {{range .Properties}}
+ {{ if or (not $.IsExported) ($.IsBaseType) (.IsBaseType) }}
+ {{ if $.IsTuple }}{{ template "privtuplefield" . }}{{ else }}{{template "privstructfield" . }}{{ end }}{{ else }}{{ if $.IsTuple }}{{ template "tuplefield" . }}{{ else }}{{template "structfield" . }}{{ end }}{{ end}}
+ {{ end }}
+ {{ if .HasAdditionalProperties }}
+ {{- if .AdditionalProperties }}
+ // {{ template "docstring" .AdditionalProperties }}
+ {{- template "propertyValidationDocString" .AdditionalProperties}}
+ {{- if and .IsExported (not .IsSubType) }}
+ {{ pascalize .AdditionalProperties.Name }}
+ {{- else }}
+ {{ pascalize .AdditionalProperties.Name }}Field
+ {{- end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{ end }}
+ {{- end }}
+ {{- if .AdditionalItems }}
+ // {{ template "docstring" .AdditionalItems }}
+ {{- template "propertyValidationDocString" .AdditionalItems}}
+ {{ if and .IsExported (not .IsSubType) }}{{ pascalize .AdditionalItems.Name }}{{ else }}{{ pascalize .AdditionalItems.Name }}Field{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{ end }}
+}
+{{- end }}
+
+{{ define "subTypeBody" }}struct {
+ {{- range .AllOf }}
+ {{- if or (and .IsBaseType .IsExported) .IsAnonymous }}
+ {{- range .Properties }}
+ {{- if not $.IsExported }}
+ {{- if $.IsTuple }}
+ {{- template "privtuplefield" . }}
+ {{- else }}
+ {{- template "privstructfield" . }}
+ {{- end }}
+ {{- else }}
+ {{- if $.IsTuple }}
+ {{- template "tuplefield" . }}
+ {{- else }}
+ {{- template "structfield" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- if .HasAdditionalProperties }}
+ {{- if .AdditionalProperties }}
+ {{- if .IsExported }}
+ {{ pascalize .AdditionalProperties.Name }}
+ {{- else }}
+ {{ .AdditionalProperties.Name }}
+ {{- end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{- end }}
+ {{- end }}
+ {{- if .AdditionalItems }}
+ {{- if .IsExported }}
+ {{ pascalize .AdditionalItems.Name }}
+ {{- else }}
+ {{ .AdditionalItems.Name }}
+ {{- end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{- end }}
+ {{- else }}
+ {{- if not (and .IsBaseType .IsExported) }}
+ {{ .GoType }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{- if not $.IsExported }}
+ {{- if $.IsTuple }}
+ {{ template "privtuplefield" . }}
+ {{- else }}
+ {{ template "privstructfield" . }}
+ {{- end }}
+ {{- else }}
+ {{- if $.IsTuple }}
+ {{ template "tuplefield" . }}
+ {{- else }}
+ {{ template "structfield" . }}
+ {{- end }}
+ {{- end}}
+ {{- end }}
+ {{- if .HasAdditionalProperties }}
+ {{- if .AdditionalProperties }}
+ {{- if and .IsExported }}
+ {{ pascalize .AdditionalProperties.Name }}
+ {{- else }}
+ {{ pascalize .AdditionalProperties.Name }}Field
+ {{- end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{- end }}
+ {{- end }}
+ {{- if .AdditionalItems }}
+ {{- if and .IsExported (not .IsSubType) }}
+ {{ pascalize .AdditionalItems.Name }}
+ {{- else }}
+ {{ pascalize .AdditionalItems.Name }}Field
+ {{- end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{- end }}
+}
+{{- end }}
+
+{{ define "withBaseTypeBody" }}struct {
+ {{ range .AllOf }}
+ {{ if or (and .IsBaseType .IsExported) .IsAnonymous }}{{ range .Properties }}
+ {{ if not .IsExported }}{{ if .IsTuple }}{{ template "privtuplefield" . }}{{ else }}{{template "privstructfield" . }}{{ end }}{{ else }}{{ if $.IsTuple }}{{ template "tuplefield" . }}{{ else }}{{template "structfield" . }}{{ end }}{{ end}}
+ {{ end }}{{ if .HasAdditionalProperties }}{{ if .IsExported }}{{ pascalize .AdditionalProperties.Name }}{{ else }}{{ .AdditionalProperties.Name }}{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"` {{end}}
+ {{ if .AdditionalItems }}{{ if and .IsExported }}{{ pascalize .AdditionalItems.Name }}{{ else }}{{ .AdditionalItems.Name }}{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{ end }}
+ {{ else }}
+ {{ if not (and .IsBaseType .IsExported) }}{{ .GoType }}{{ end }}{{ end }}
+ {{ end }}
+ {{range .Properties}}{{ if .IsBaseType }}
+ {{ if not $.IsExported }}{{ else }}{{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`{{ end}}
+ {{end}}{{ end }}
+ {{ if .HasAdditionalProperties }}{{ if and .IsExported }}{{ pascalize .AdditionalProperties.Name }}{{ else }}{{ pascalize .AdditionalProperties.Name }}Field{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{ end }}
+ {{ if .AdditionalItems }}{{ if and .IsExported (not .IsSubType) }}{{ pascalize .AdditionalItems.Name }}{{ else }}{{ pascalize .AdditionalItems.Name }}Field{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{ end }}
+}
+{{- end }}
+
+{{ define "withoutBaseTypeBody" }}struct {
+ {{ range .AllOf }}
+ {{ if .IsAnonymous }}
+ {{ range .Properties }}
+ {{ if and .IsExported (not .IsBaseType) }}
+ {{ if .IsTuple }}
+ {{ template "tuplefield" . }}
+ {{ else }}
+ {{template "structfield" . }}
+ {{ end }}
+ {{ else }}
+ {{ pascalize .Name }} json.RawMessage `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end}}
+ {{ end }}
+ {{ if .HasAdditionalProperties }}
+ {{ if .AdditionalProperties }}
+ {{ if .IsExported }}{{ pascalize .AdditionalProperties.Name }}{{ else }}{{ .AdditionalProperties.Name }}{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{end}}
+ {{ end }}
+ {{ if .AdditionalItems }}
+ {{ if .IsExported }}{{ pascalize .AdditionalItems.Name }}{{ else }}{{ .AdditionalItems.Name }}{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{ end }}
+ {{ else }}
+ {{ if not (and .IsBaseType .IsExported) }}
+ {{ .GoType }}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{range .Properties}}
+ {{ if not .IsBaseType }}
+ {{ if not $.IsExported }}
+ {{template "privstructfield" . }}
+ {{ else }}
+ {{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end}}
+ {{ else }}
+ {{ pascalize .Name }} json.RawMessage `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{end}}
+ {{ end }}
+ {{ if .HasAdditionalProperties }}
+ {{ pascalize .AdditionalProperties.Name }}{{ if .IsExported }}Field{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{ end }}
+}
+{{- end }}
+
+{{ define "withoutBaseTypeBodyOrNonExported" }}struct {
+ {{ range .AllOf }}
+ {{ if .IsAnonymous }}
+ {{ range .Properties }}
+ {{ if and .IsExported (not .IsBaseType) }}
+ {{ if .IsTuple }}
+ {{ template "tuplefield" . }}
+ {{ else }}
+ {{template "structfield" . }}
+ {{ end }}
+ {{ end}}
+ {{ end }}
+ {{ if .HasAdditionalProperties }}
+ {{ if .AdditionalProperties }}
+ {{ if .IsExported }}{{ pascalize .AdditionalProperties.Name }}{{ else }}{{ .AdditionalProperties.Name }}{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{end}}
+ {{ end }}
+ {{ if .AdditionalItems }}
+ {{ if .IsExported }}{{ pascalize .AdditionalItems.Name }}{{ else }}{{ .AdditionalItems.Name }}{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{ end }}
+ {{ else }}
+ {{ if not (and .IsBaseType .IsExported) }}
+ {{ .GoType }}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{range .Properties}}
+ {{ if not .IsBaseType }}
+ {{ if not .IsExported }}
+ {{template "privstructfield" . }}
+ {{ else }}
+ {{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end}}
+ {{end}}
+ {{ end }}
+ {{ if .HasAdditionalProperties }}
+ {{ pascalize .AdditionalProperties.Name }}{{ if .IsExported }}Field{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{ end }}}{
+ {{ range .AllOf }}
+ {{ if .IsAnonymous }}
+ {{ range .Properties }}
+ {{ if not .IsBaseType }}
+ {{ pascalize .Name }}: {{ .ReceiverName}}.{{ pascalize .Name }},
+ {{ end }}
+ {{ end }}
+ {{ else }}
+ {{ if not (and .IsBaseType .IsExported) }}
+ {{ .GoType }}: {{ .ReceiverName }}.{{ .GoType }},
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ range .Properties }}
+ {{ if and (not .IsBaseType) .IsExported }}
+ {{ pascalize .Name }}: {{ .ReceiverName }}.{{ pascalize .Name }},
+ {{ end }}
+ {{ end }}
+ },
+{{- end }}
+
+{{ define "withBaseTypeBodyAndNonExported" }}struct{
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ if .IsBaseType }}
+ {{ pascalize .Name }} {{ template "schemaType" . }} `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ range .Properties }}
+ {{ if or (not .IsExported) .IsBaseType }}
+ {{ pascalize .Name }} {{ template "schemaType" . }} `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end }}
+ {{end}}} {
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ if .IsBaseType }}
+ {{ pascalize .Name }}:
+ {{ if ne .DiscriminatorField .Name }}
+ {{ .ReceiverName }}.{{ if .IsSubType}}{{ camelize .Name }}Field{{ else }}{{ pascalize .Name }}(){{ end }},
+ {{ else }}
+ {{ .ReceiverName }}.{{pascalize .Name}}(),
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ range .Properties }}
+ {{ if or (not .IsExported) .IsBaseType }}
+ {{ pascalize .Name }}: {{ .ReceiverName }}.{{ if .IsBaseType}}{{ camelize .Name }}Field{{ else }}{{ pascalize .Name }}{{ end }},
+ {{ end }}
+ {{ end }} },
+{{- end }}
+
+{{ define "withoutAdditionalBody" }}struct {
+ {{ range .AllOf }}
+ {{ if or (and $.IsSubType .IsBaseType .IsExported) .IsAnonymous }}{{ range .Properties }}
+ {{ if ne $.DiscriminatorField .Name }}{{ if or (not $.IsExported) (and $.IsSubType .IsBaseType) }}{{ if $.IsTuple }}{{ template "privtuplefield" . }}{{ else }}{{template "privstructfield" . }}{{ end }}{{ else }}{{ if $.IsTuple }}{{ template "tuplefield" . }}{{ else }}{{template "structfield" . }}{{ end }}{{ end}}{{ end }}
+ {{ end }}
+ {{ else }}
+ {{ if not (and .IsBaseType .IsExported) }}{{ .GoType }}{{ end }}{{ end }}
+ {{ end }}
+ {{range .Properties}}
+ {{ if or (not $.IsExported) (and $.IsSubType .IsBaseType) }}{{ if $.IsTuple }}{{ template "privtuplefield" . }}{{ else }}{{template "privstructfield" . }}{{ end }}{{ else }}{{ if $.IsTuple }}{{ template "tuplefield" . }}{{ else }}{{template "structfield" . }}{{ end }}{{ end}}
+ {{end}}
+}
+{{- end }}
+
+{{ define "JustBaseTypeBody" }}struct {
+ /* Just the base type fields. Used for unmashalling polymorphic types.*/
+ {{ range .AllOf }}
+ {{ if .IsBaseType }}
+ {{ range .Properties }}
+ {{ if .IsExported }}
+ {{ if .IsTuple }}
+ {{ template "tuplefield" . }}
+ {{ else }}
+ {{template "structfield" . }}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/schemaembedded.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemaembedded.gotmpl
new file mode 100644
index 000000000..f86c27bc6
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemaembedded.gotmpl
@@ -0,0 +1,21 @@
+{{ define "schemaEmbedded" }}
+type {{ pascalize .Name }} struct {
+ {{ if .ElemType.IsNullable }}*{{ end }}{{ .ElemType.GoType }}
+}
+
+func ({{.ReceiverName }} {{ if or .IsTuple .IsComplexObject }}*{{ end }}{{ if .Discriminates }}{{ camelize .Name }}{{ else if .IsExported }}{{ pascalize .Name }}{{ else }}{{ .Name }}{{ end }}) Validate(formats strfmt.Registry) error {
+ var f interface{} = {{ .ReceiverName }}.{{ dropPackage .ElemType.GoType }}
+ if v, ok := f.(runtime.Validatable) ; ok {
+ return v.Validate(formats)
+ }
+ return nil
+}
+
+func ({{.ReceiverName }} {{ if or .IsTuple .IsComplexObject }}*{{ end }}{{ if .Discriminates }}{{ camelize .Name }}{{ else if .IsExported }}{{ pascalize .Name }}{{ else }}{{ .Name }}{{ end }}) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
+ var f interface{} = {{ .ReceiverName }}.{{ dropPackage .ElemType.GoType }}
+ if v, ok := f.(runtime.ContextValidatable) ; ok {
+ return v.ContextValidate(ctx, formats)
+ }
+ return nil
+}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/schemapolymorphic.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemapolymorphic.gotmpl
new file mode 100644
index 000000000..67b6a4fe0
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemapolymorphic.gotmpl
@@ -0,0 +1,53 @@
+{{ define "schemaPolymorphic" }}
+ type {{ pascalize .Name }} interface {
+ {{- if not (or .IsInterface .IsStream) }}{{/*
+ A base type is always Validatable.
+ Under normal conditions, we can't have a base type rendered a .IsStream or .IsInterface: this check is just for sanity check).
+
+ In the definition of the base type itself, this means that the unexported struct holding
+ the definition of the base type has a Validate() func and a ContextValitate() func.
+ */}}
+ runtime.Validatable
+ runtime.ContextValidatable
+ {{- end }}
+ {{ range .AllOf }}
+ {{- if .IsAnonymous }}
+ {{ range .Properties }}
+ {{ if $.IsTuple }}{{ template "tuplefieldIface" . }}{{ else }}{{template "structfieldIface" . }}{{ end }}
+ {{- end }}
+ {{ template "mapOrSliceGetter" . }}
+ {{- else }}
+ {{ .GoType }}
+ {{- end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{- if $.IsTuple }}
+ {{ template "tuplefieldIface" . }}
+ {{- else }}
+ {{ template "structfieldIface" . }}
+ {{- end }}
+ {{- end }}
+ {{ template "mapOrSliceGetter" . }}
+ }
+
+ type {{ camelize .Name }} {{ template "schemaBody" . }}{{/* unexported implementation of the interface (TODO(fred): atm, this is not used, issue #232) */}}
+ {{- range .Properties }}
+
+ // {{ pascalize .Name}} gets the {{ humanize .Name }} of this polymorphic type
+ func ({{ $.ReceiverName}} *{{ camelize $.Name}}) {{ pascalize .Name}}() {{ template "schemaType" . }}{
+ {{- if eq $.DiscriminatorField .Name }}
+ return {{ printf "%q" $.DiscriminatorValue }}
+ {{- else }}
+ return {{ $.ReceiverName }}.{{camelize .Name}}Field
+ {{- end }}
+ }
+
+ // Set{{ pascalize .Name}} sets the {{ humanize .Name }} of this polymorphic type
+ func ({{ $.ReceiverName}} *{{ camelize $.Name}}) Set{{ pascalize .Name}}(val {{ template "schemaType" . }}) {
+ {{- if ne $.DiscriminatorField .Name }}
+ {{ $.ReceiverName }}.{{camelize .Name}}Field = val
+ {{- end }}
+ }
+ {{- end }}{{/* TODO(fred): AdditionalProperties */}}
+ {{ template "polymorphicSerializer" . }}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/schematype.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/schematype.gotmpl
new file mode 100644
index 000000000..cd5ef8d16
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/schematype.gotmpl
@@ -0,0 +1,29 @@
+{{ define "schemaType" }}
+ {{- if and (or (gt (len .AllOf) 0) .IsAnonymous) ( not .IsMap) }}
+ {{- template "schemaBody" . }}
+ {{- else }}
+ {{- if and (not .IsMap) .IsNullable (not .IsSuperAlias) }}*{{ end }}
+ {{- if .IsSuperAlias }} = {{ end }}
+ {{- .GoType }}
+ {{- end}}
+{{- end }}
+
+{{ define "dereffedSchemaType" }}
+ {{- if and (or (gt (len .AllOf) 0) .IsAnonymous) ( not .IsMap) }}
+ {{- template "schemaBody" . }}
+ {{- else }}
+ {{- .GoType }}
+ {{- end}}
+{{- end }}
+
+{{ define "typeSchemaType" }}
+ {{- if and (or (gt (len .AllOf) 0) .IsAnonymous) ( not .IsMap) ( not .IsSuperAlias ) }}
+ {{- template "schemaBody" . }}
+ {{- else if and .IsSubType ( not .IsSuperAlias ) }}
+ {{- template "subTypeBody" . }}
+ {{- else }}
+ {{- if and (not .IsMap) .IsNullable (not .IsSuperAlias) }}*{{ end }}
+ {{- if .IsSuperAlias }} = {{ end }}
+ {{- if .AliasedType }}{{ .AliasedType }}{{ else }}{{ .GoType }}{{ end }}
+ {{- end}}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/schemavalidator.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemavalidator.gotmpl
new file mode 100644
index 000000000..61684acd0
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemavalidator.gotmpl
@@ -0,0 +1,1194 @@
+{{ define "primitivefieldcontextvalidator" }}
+ {{ if .ReadOnly }}
+ if err := validate.ReadOnly(ctx, {{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil{
+ return err
+ }
+ {{ end }}
+{{ end }}
+{{ define "primitivefieldvalidator" }}
+ {{ if .Required }}
+ {{- if and (eq .GoType "string") (not .IsNullable) }}
+ if err := validate.RequiredString({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsAliased }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if .IsAliased }}){{ end }}); err != nil {
+ {{- else }}
+ if err := validate.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ {{- end }}
+ return err
+ }
+ {{- end }}
+ {{ if .MinLength }}
+ if err := validate.MinLength({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MinLength }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{ if .MaxLength }}
+ if err := validate.MaxLength({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MaxLength }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{ if .Pattern }}
+ if err := validate.Pattern({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ToString }}, `{{ escapeBackticks .Pattern }}`); err != nil {
+ return err
+ }
+ {{- end }}
+ {{ if .Minimum }}
+ {{ template "validationMinimum" . }}
+ {{ end }}
+ {{ if .Maximum }}
+ {{ template "validationMaximum" . }}
+ {{ end }}
+ {{ if .MultipleOf }}
+ {{ template "validationMultipleOf" . }}
+ {{ end }}
+ {{ if .Enum }}
+ // value enum
+ if err := {{.ReceiverName }}.validate{{ pascalize .Name }}{{ .Suffix }}Enum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{ if and .IsCustomFormatter (not .IsStream) (not .IsBase64) }}
+ {{ template "validationCustomformat" . }}
+ {{- end }}
+{{ end }}
+
+{{ define "slicecontextvalidator" }}
+ {{ if .ReadOnly }}
+ if err := validate.ReadOnly(ctx, {{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil{
+ return err
+ }
+ {{ end }}
+ {{ if .Items }}
+ {{- if and (or .Items.ReadOnly .Items.HasContextValidations) (not .Items.IsInterface) (not .Items.IsStream) }}
+ for {{.IndexVar }} := 0; {{.IndexVar }} < len({{.ValueExpression }}); {{.IndexVar }}++ {
+ {{- with .Items }}
+ {{ template "propertycontextvalidator" . }}
+ {{- end }}
+ }
+ {{- end }}
+ {{- else }}
+ {{- if or .IsAliased (ne .ValueExpression .ReceiverName) }}{{/* prevents generated code to call itself: this is reserved for aliased types */}}
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ if err := {{.ValueExpression }}.ContextValidate(ctx, formats); err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ }
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{ end }}
+
+{{define "slicevalidator" }}
+ {{ if .Required }}
+ if err := validate.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{ if or .MinItems .MaxItems }}
+ {{ .IndexVar }}{{ pascalize .Name }}Size := int64(len({{.ValueExpression }}))
+ {{ end }}
+ {{ if .MinItems }}
+ if err := validate.MinItems({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .IndexVar }}{{ pascalize .Name }}Size, {{.MinItems }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{ if .MaxItems }}
+ if err := validate.MaxItems({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .IndexVar }}{{ pascalize .Name }}Size, {{.MaxItems }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{ if .UniqueItems }}
+ if err := validate.UniqueItems({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{.ValueExpression }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{ if .Enum }}
+ // for slice
+ if err := {{.ReceiverName }}.validate{{ pascalize .Name }}Enum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{.ValueExpression }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{ if .Items }}
+ {{- if and (or .Items.Required .Items.HasValidations .Items.IsBaseType .Items.IsAliased) (not .Items.IsInterface) (not .Items.IsStream) (not .Items.SkipExternalValidation) }}
+ for {{.IndexVar }} := 0; {{.IndexVar }} < len({{.ValueExpression }}); {{.IndexVar }}++ {
+ {{- with .Items }}
+ {{- if and .IsNullable (not .Required) (not .IsMapNullOverride) }}
+ {{- if .IsInterface }}
+ if {{ .ValueExpression }} == nil { // not required
+ {{- else }}
+ if swag.IsZero({{ .ValueExpression }}) { // not required
+ {{- end }}
+ continue
+ }
+ {{- end }}
+ {{ template "propertyvalidator" . }}
+ {{- end }}
+ }
+ {{- end }}
+ {{- else }}
+ {{- if and (or .IsAliased (ne .ValueExpression .ReceiverName) (not .SkipExternalValidation)) }}{{/* prevents generated code to call itself: this is reserved for aliased types */}}
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ if err := {{.ValueExpression }}.Validate(formats); err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ }
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{ end }}
+{{ define "mapcontextvalidator" }}
+ {{- if and .Required }}
+ {{- if or .IsNullable .IsInterface }}
+ if {{ .ReceiverName }}.{{ pascalize .Name }} == nil {
+ return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, nil)
+ }
+ {{- else }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{ .ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+ {{ if .HasAdditionalProperties }}
+ {{- if .AdditionalProperties.HasContextValidations }}
+ {{- $validatedValues := .ValueExpression }}{{ $keyVar := .AdditionalProperties.KeyVar }}
+ for {{ .AdditionalProperties.KeyVar }} := range {{ .ValueExpression }} {
+ {{ with .AdditionalProperties }}
+ {{/*Don't need to add context validate directly here since we are recursing*/}}
+ {{- if .IsPrimitive }}
+ {{- if .IsAliased }}
+ {{- if not .IsAnonymous }}
+ if val, ok := {{ $validatedValues }}[{{ $keyVar }}]; ok {
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if val != nil {
+ {{- end }}
+ if err := val.ContextValidate(ctx, formats); err != nil {
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ }
+ {{- else }}{{/* validation of anonymous objects */}}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ template "propertycontextvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{ template "propertycontextvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}
+ // TODO: context validating additional items should go here, if you see this raise an issue{{/* TODO(fred): investigate the case to remove that comment: AdditionalItems shouldn't come in maps. Upstream validation is needed to guard against this */}}
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{ end }}
+ {{ else }}
+ {{ template "primitivefieldcontextvalidator" . }}
+ {{ end }}
+ {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}}
+ {{- if and (not .IsStream) (not .IsBase64) }}{{/* TODO: IsStream and CustomFormattershould be mutually exclusive in type resolver */}}
+ // TODO: context validating custom formatter items should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{/*
+ {{ template "validationCustomformat" . }}
+ */}}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{ template "slicecontextvalidator" . }}
+ {{- else if and .IsMap (not .IsInterface) }}
+ {{ template "mapcontextvalidator" . }}
+ {{- else if and .IsMap .IsInterface }}
+ {{ if .Enum }}
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}ValueEnum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- else if or .IsComplexObject .IsTuple .IsAdditionalProperties .IsAliased }}
+ {{- if not .IsAnonymous }}
+ if val, ok := {{ $validatedValues }}[{{ $keyVar }}]; ok {
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if val != nil {
+ {{- end }}
+ if err := val.ContextValidate(ctx, formats); err != nil {
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ }
+ {{- else }}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ template "propertycontextvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{ template "propertycontextvalidator" . }}
+ {{- end }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}
+ // TODO: context validating additional items should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{ end }}
+ {{- end }}
+ }
+ {{ end }}
+ {{ end }}
+ {{- else if .IsAliased }}
+ {{- if and .IsMap .HasValidations }}{{/* validation of aliased maps but does not know about AdditionalProperties: e.g. it comes from a $ref */}}
+ {{- if not .IsAnonymous }}
+ {{- if $.IsMap }}{{/* we come from a map range */}}
+ if val, ok := {{ .ValueExpression }}; ok {
+ {{- end }}
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ {{- if $.IsMap }}
+ if val != nil {
+ {{- else }}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ {{- end }}
+ if err := {{ if $.IsMap }}val{{ else }}{{ .ValueExpression }}{{ end }}.ContextValidate(ctx, formats); err != nil {
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ {{- if or $.IsMap }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{ end }} {{/*mapcontextvalidator*/}}
+{{ define "mapvalidator" }}{{/* validates additionalProperties */}}
+ {{- if and .Required }}
+ {{- if or .IsNullable .IsInterface }}
+ if {{ .ReceiverName }}.{{ pascalize .Name }} == nil {
+ return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, nil)
+ }
+ {{- else }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{ .ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+ {{ if .HasAdditionalProperties }}
+ {{- if and .AdditionalProperties.HasValidations (not .AdditionalProperties.SkipExternalValidation) }}
+ {{- $validatedValues := .ValueExpression }}{{ $keyVar := .AdditionalProperties.KeyVar }}
+ for {{ .AdditionalProperties.KeyVar }} := range {{ .ValueExpression }} {
+ {{ with .AdditionalProperties }}
+ {{- if and (not .Required) .IsNullable }}{{/* skip when nul type is accepted */}}
+ {{- if .IsInterface }}
+ if {{ $validatedValues }}[{{ $keyVar }}] == nil { // not required
+ {{- else }}
+ if swag.IsZero({{ $validatedValues }}[{{ $keyVar }}]) { // not required
+ {{- end }}
+ continue
+ }
+ {{- else if and (.Required) (not .IsArray) }}{{/* Required slice is processed below */}}
+ if err := validate.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- if and .IsPrimitive (not .SkipExternalValidation ) }}
+ {{- if .IsAliased }}
+ {{- if not .IsAnonymous }}
+ if val, ok := {{ $validatedValues }}[{{ $keyVar }}]; ok {
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if val != nil {
+ {{- end }}
+ if err := val.Validate(formats); err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ }
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ }
+ {{- else }}{{/* validation of anonymous objects */}}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ template "propertyvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{ template "propertyvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}
+ // TODO: validating additional items should go here, if you see this raise an issue{{/* TODO(fred): investigate the case to remove that comment: AdditionalItems shouldn't come in maps. Upstream validation is needed to guard against this */}}
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{ end }}
+ {{- else }}
+ {{ template "primitivefieldvalidator" . }}
+ {{- end }}
+ {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}}
+ {{- if .Required }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- if and (not .IsStream) (not .IsBase64) (not .SkipExternalValidation) }}{{/* TODO: IsStream and CustomFormattershould be mutually exclusive in type resolver */}}
+ {{ template "validationCustomformat" . }}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{ template "slicevalidator" . }}
+ {{- else if and .IsMap (not .IsInterface) }}
+ {{ template "mapvalidator" . }}
+ {{ if .Enum }}
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}ValueEnum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- else if and .IsMap .IsInterface }}
+ {{ if .Enum }}
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}ValueEnum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- else if or .IsComplexObject .IsTuple .IsAdditionalProperties .IsAliased }}
+ {{- if and (not .IsAnonymous) (not .SkipExternalValidation) }}
+ if val, ok := {{ $validatedValues }}[{{ $keyVar }}]; ok {
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if val != nil {
+ {{- end }}
+ if err := val.Validate(formats); err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ }
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ }
+ {{- else }}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ template "propertyvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{ template "propertyvalidator" . }}
+ {{- end }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}
+ // TODO: validating additional items should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{ end }}
+ {{ end }}
+ }
+ {{- end }}
+ {{ end }}
+ {{ if .Enum }}
+ // from map
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}Enum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{- else if .IsAliased }}
+ {{- if and .IsMap .HasValidations (not .SkipExternalValidation) }}{{/* validation of aliased maps but does not know about AdditionalProperties: e.g. it comes from a $ref */}}
+ {{- if not .IsAnonymous }}
+ {{- if $.IsMap }}{{/* we come from a map range */}}
+ if val, ok := {{ .ValueExpression }}; ok {
+ {{- end }}
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ {{- if $.IsMap }}
+ if val != nil {
+ {{- else }}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ {{- end }}
+ if err := {{ if $.IsMap }}val{{ else }}{{ .ValueExpression }}{{ end }}.Validate(formats); err != nil {
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ {{- if or $.IsMap }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{ end }}
+
+{{define "objectcontextvalidator" }}
+ {{/* Debug
+ // .Name: {{ .Name }}
+ // .IsAliased: {{ .IsAliased }}
+ // .IsAnonymous: {{ .IsAnonymous }}
+ // .IsNullable: {{ .IsNullable }}
+ // .Required: {{ .Required }}
+ // .ReadOnly: {{ .ReadOnly }}
+ // .HasContextValidations {{ .HasContextValidations }}
+ // .IsBaseType: {{ .IsBaseType }}
+ // .ValueExpression: {{ .ValueExpression }}
+ // .ReceiverName: {{ .ReceiverName }}
+ */}}
+ {{- if not .IsAnonymous }}
+ {{- if or .IsAliased (ne .ValueExpression .ReceiverName) }}{{/* prevents generated code to call itself: case of aliased types */}}
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ {{ if not .Required }}
+ {{- if .IsInterface }}
+ if {{ .ValueExpression }} == nil { // not required
+ {{- else }}
+ if swag.IsZero({{ .ValueExpression }}) { // not required
+ {{- end }}
+ return nil
+ }
+ {{ end }}
+ if err := {{.ValueExpression }}.ContextValidate(ctx, formats); err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ }
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- else }}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ template "propertycontextvalidator" . }}
+ {{ end }}
+ {{ end }}
+ {{ range .Properties }}
+ {{ template "propertycontextvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{- if and .IsTuple .AdditionalItems }}
+ // TODO: context validating additional items should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{- end }}
+{{ end }}
+{{ define "minmaxProperties" }}
+ {{- if and (or .IsMap (and .IsAdditionalProperties .HasAdditionalProperties)) (or .MinProperties .MaxProperties) }}
+ {{- if and (not .IsAdditionalProperties) (not .IsInterface) (eq (len .Properties) 0) }}{{/* map only */}}
+ nprops := len({{ if and (not .IsAliased) .HasAdditionalProperties }}{{ .ReceiverName }}{{ else }}{{ .ValueExpression }}{{ end }})
+ {{- else }}{{/* object with properties */}}
+ {{- if and .IsNullable .MinProperties }}
+ {{- if gt0 .MinProperties }}
+
+ // short circuits minProperties > 0
+ if {{ .ReceiverName }} == nil {
+ return errors.TooFewProperties({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .MinProperties }})
+ }
+ {{- end }}
+ {{- end }}
+
+ props := make(map[string]json.RawMessage, {{ len .Properties }}{{ if .HasAdditionalProperties }}+ 10{{ end }})
+ j, err := swag.WriteJSON({{ .ReceiverName }})
+ if err != nil {
+ return err
+ }
+
+ if err = swag.ReadJSON(j, &props) ; err != nil {
+ return err
+ }
+
+ nprops := len(props)
+ {{- end }}
+ {{ if .MinProperties }}
+ // minProperties: {{ .MinProperties }}
+ if nprops < {{ .MinProperties }} {
+ return errors.TooFewProperties({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .MinProperties }})
+ }
+ {{- end }}
+ {{ if .MaxProperties }}
+ // maxProperties: {{ .MaxProperties }}
+ if nprops > {{ .MaxProperties }} {
+ return errors.TooManyProperties({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .MaxProperties }})
+ }
+ {{- end }}
+ {{- end }}
+{{- end }}
+{{define "objectvalidator" }}{{/* renders the Validate() snippet for an object-typed value: Required check, then either delegation to the type's own Validate (aliased/non-anonymous) or inline min/max-properties + property validation (anonymous) */}}{{/* // DEBUG
+ // .Name: {{ .Name }}
+ // .IsAliased: {{ .IsAliased }}
+ // .IsAnonymous: {{ .IsAnonymous }}
+ // .IsNullable: {{ .IsNullable }}
+ // .Required: {{ .Required }}
+ // .ReadOnly: {{ .ReadOnly }}
+ // .HasValidations {{ .HasValidations }}
+ // .HasContextValidations {{ .HasContextValidations }}
+ // .IsBaseType: {{ .IsBaseType }}
+ // .ValueExpression: {{ .ValueExpression }}
+ // .ReceiverName: {{ .ReceiverName }}
+ // .IsAdditionalProperties: {{ .IsAdditionalProperties }}
+ // .IsInterface: {{ .IsInterface }}
+ // .IsMap: {{ .IsMap }}
+ // .IsArray: {{ .IsArray }}
+ // .IsMapNullOverride: {{ .IsMapNullOverride }}
+ */}}
+ {{- if not .IsAnonymous }}
+ {{- if and .Required (or .IsNullable .IsBaseType .IsMap) }}{{/* only nil-able kinds need an explicit Required check */}}
+ if err := validate.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{.ValueExpression }}); err != nil {
+ return err
+ }
+ {{- if and (not .Required) .IsBaseType }}{{/* NOTE(review): unreachable — the enclosing branch requires .Required to be true; confirm upstream intent */}}
+ if {{ .ValueExpression }} == nil {
+ return nil
+ }
+ {{- end }}
+ {{ end }}
+ {{- if and (or .IsAliased (ne .ValueExpression .ReceiverName)) (not .SkipExternalValidation) }}{{/* prevents generated code to call itself: case of aliased types */}}
+ {{- if or (and (or .IsNullable) (not .IsMapNullOverride)) .IsMap .IsArray }}{{/* guard nil-able values before delegating */}}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ if err := {{.ValueExpression }}.Validate(formats); err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ }
+ return err
+ }
+ {{- if or (and (or .IsNullable) (not .IsMapNullOverride)) .IsMap .IsArray }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- else }}{{/* anonymous object: validate min/max properties and each property inline */}}
+ {{ template "minmaxProperties" .}}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ template "propertyvalidator" . }}
+ {{ end }}
+ {{ end }}
+ {{ range .Properties }}
+ {{ template "propertyvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{- if and .IsTuple .AdditionalItems }}
+ // TODO: validating additional items should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{- end }}
+{{ end }}
+
+{{define "propertycontextvalidator"}}{{/* dispatches ContextValidate rendering for a single property, by property kind (primitive / custom format / array / map / object) */}}
+ {{- if .IsPrimitive }}
+ {{- if .IsAliased }}
+ {{ template "objectcontextvalidator" . }}
+ {{- else }}
+ {{ template "primitivefieldcontextvalidator" . }}
+ {{- end }}
+ {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}}
+ // TODO: context validating primitive with custom formatter should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{- if .ReadOnly }}
+
+ if err := validate.ReadOnly{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- if and (not .IsStream) (not .IsBase64) }}
+ // TODO: context validating properties with custom formatter should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{/*{{ template "validationCustomformat" . }}*/}}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{ template "slicecontextvalidator" . }}
+ {{- else if and .IsMap (or (not .IsAliased) (and .IsAliased .IsInterface)) }}{{/* except for interface, the rendering for aliased maps is performed by objectvalidator */}}
+ {{ template "mapcontextvalidator" . }}
+ {{- else if or .IsComplexObject .IsTuple .IsAdditionalProperties .IsAliased }}
+ {{ template "objectcontextvalidator" . }}
+ {{- end }}
+
+{{end}}
+
+{{define "propertyvalidator" }}{{/* dispatches Validate rendering for a single property, by property kind; mirrors propertycontextvalidator */}}
+ {{- if .IsPrimitive }}
+ {{- if .IsAliased }}
+ {{- if and .Required (not .IsAnonymous) }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{ template "objectvalidator" . }}
+ {{- else }}
+ {{ template "primitivefieldvalidator" . }}
+ {{- end }}
+ {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}}
+ {{- if .Required }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- if and (not .IsStream) (not .IsBase64) }}{{/* streams and base64 have no format to re-validate */}}
+ {{ template "validationCustomformat" . }}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{ template "slicevalidator" . }}
+ {{- else if and .IsMap (or (not .IsAliased) (and .IsAliased .IsInterface)) }}
+ {{ template "minmaxProperties" . }}
+ {{ template "mapvalidator" . }}
+ {{- else if or .IsComplexObject .IsTuple .IsAdditionalProperties .IsAliased }}
+ {{- if and .IsAdditionalProperties .Required (not .IsAliased) }}{{/* required additionalProperties member: emit an explicit nil/Required check */}}
+ {{- if or .IsNullable .IsInterface }}
+ if {{ .ValueExpression }} == nil {
+ return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }})
+ }
+ {{- else }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+ {{ template "objectvalidator" . }}
+ {{- else if and .IsExternal .Required }}{{/* externally-defined type: only the Required check can be generated here */}}
+ {{- if or .IsNullable .IsInterface }}
+ if {{ .ValueExpression }} == nil {
+ return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }})
+ }
+ {{- else }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+{{ end }}
+
+{{define "fieldcontextvalidator" }}{{/* ContextValidate dispatch for a schema-level (non-property) field */}}
+ {{- if .IsPrimitive }}
+ {{ template "primitivefieldcontextvalidator" . }}
+ {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}}
+ {{- if and (not .IsStream) (not .IsBase64) }}
+ // TODO: context validating properties with custom formatter should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{/*
+ {{ template "validationCustomformat" . }}
+ */}}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{ template "slicecontextvalidator" . }}
+ {{- else if .IsMap }}
+ {{ template "mapcontextvalidator" . }}
+ {{- end }}
+
+{{ end }}
+
+{{ define "fieldvalidator"}}{{/* Validate dispatch for a schema-level (non-property) field; mirrors fieldcontextvalidator */}}
+ {{- if .IsPrimitive }}
+ {{ template "primitivefieldvalidator" . }}
+ {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}}
+ {{- if .Required }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- if and (not .IsStream) (not .IsBase64) }}
+ {{ template "validationCustomformat" . }}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{ template "slicevalidator" . }}
+ {{- else if .IsMap }}
+ {{ template "mapvalidator" . }}
+ {{- end }}
+{{ end }}
+
+{{define "schemacontextvalidator" }}{{/* emits the ContextValidate method for a schema plus one contextValidateXxx helper per property (own properties and AllOf parts) */}}
+// ContextValidate validate this {{ humanize .Name }} based on the context it is used
+func ({{.ReceiverName }} {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ if .Discriminates }}{{ camelize .Name }}{{ else if .IsExported }}{{ pascalize .Name }}{{ else }}{{ .Name }}{{ end }}) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
+ var res []error
+ {{ range .AllOf }}
+ {{- if not .Properties }}
+ // validation for a type composition with {{ .GoType }}
+ {{- end }}
+ {{- if and (or .IsInterface .IsAnonymous .IsBaseType) (or .HasContextValidations) }}{{/* anonymous/interface composition parts are validated inline */}}
+ {{ template "fieldcontextvalidator" . }}
+ {{ range .Properties }}
+ {{ if and (ne $.DiscriminatorField .Name) (or .HasContextValidations) }}{{/* the discriminator field is never validated here */}}
+ if err := {{.ReceiverName }}.contextValidate{{ pascalize .Name }}(ctx, formats); err != nil {
+ res = append(res, err)
+ }
+ {{- end }}
+ {{- end }}
+ {{- if and .HasAdditionalProperties (not .IsMap) }}{{/* validates additionalProperties in an object which is not itself a map */}}
+ {{ template "mapcontextvalidator" . }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}
+ // TODO: context validating additional items should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{/*
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}Items(formats); err != nil {
+ res = append(res, err)
+ }
+ */}}
+ {{ end }}
+ {{- else if (or .HasContextValidations) }}{{/* named composition part: delegate to its own ContextValidate */}}
+ if err := {{ .ReceiverName }}.{{ pascalize (dropPackage .GoType) }}.ContextValidate(ctx, formats); err != nil {
+ res = append(res, err)
+ }
+ {{- end }}
+ {{- end }} {{/*end AllOf*/}}
+ {{ template "fieldcontextvalidator" . }}
+ {{ range .Properties }}
+ {{ if .HasContextValidations }} {{/* complex obj always has cv*/}}
+ if err := {{.ReceiverName }}.contextValidate{{ pascalize .Name }}(ctx, formats); err != nil {
+ res = append(res, err)
+ }
+ {{ end }}
+ {{ end }}
+ {{- if and .HasAdditionalProperties (not .IsMap) }}{{/* validates additionalProperties in an object which is not itself a map */}}
+ {{ template "mapcontextvalidator" . }}
+ {{- end }}
+ if len(res) > 0 {
+ return errors.CompositeValidationError(res...)
+ }
+ return nil
+}
+
+
+ {{ range .Properties }}{{/* one contextValidateXxx helper per own property */}}
+ {{ if .HasContextValidations }}
+func ({{.ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) contextValidate{{ pascalize .Name }}(ctx context.Context, formats strfmt.Registry) error {
+ {{template "propertycontextvalidator" . }}
+ return nil
+}
+ {{ end }}
+ {{ end }} {{/*Properties*/}}
+ {{ range .AllOf }}{{/* and one per AllOf-part property */}}
+ {{ range .Properties }}
+ {{ if .HasContextValidations }}
+func ({{.ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) contextValidate{{ pascalize .Name }}(ctx context.Context, formats strfmt.Registry) error {
+ {{template "propertycontextvalidator" . }}
+ return nil
+}
+ {{ end }}
+ {{ end }}
+ {{ end }} {{/*AllOf*/}}
+{{end}} {{/*schemacontextvalidator*/}}
+
+{{define "schemavalidator" }}{{/* emits the Validate method for a schema, plus package-level enum value tables and per-property validators */}}
+ {{ if .Enum }}{{/* schema-level enum: emit constants (strings only), the value table and its checker */}}
+ {{ if (eq .SwaggerType "string") }}
+ {{ $gotype := .GoType }}
+const (
+ {{ range .Enum }}
+ {{- $variant := print $gotype (pascalize (cleanupEnumVariant .)) }}
+ // {{ $variant }} captures enum value {{ printf "%q" . }}
+ {{ $variant }} {{ $gotype }} = {{ printf "%q" . }}
+ {{ end }}
+)
+ {{ end }}
+
+// for schema
+var {{ camelize .Name }}Enum []interface{}
+func init() {
+ var res []{{ template "dereffedSchemaType" . }}
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize .Name }}Enum = append({{ camelize .Name }}Enum, v)
+ }
+}
+
+func ({{ .ReceiverName }} {{ if not .IsPrimitive }}*{{ end }}{{ if .IsExported }}{{ pascalize .Name }}{{ else }}{{ .Name }}{{ end }}) validate{{ pascalize .Name }}Enum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" . }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize .Name }}Enum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ if .ItemsEnum }}{{/* enum constraint on array items */}}
+var {{ camelize .Name }}ItemsEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" .Items }}
+ if err := json.Unmarshal([]byte(`{{ json .ItemsEnum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize .Name }}ItemsEnum = append({{ camelize .Name }}ItemsEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}ItemsEnum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" .Items }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize .Name }}ItemsEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ with .AdditionalProperties }}{{/* enum constraint on additionalProperties values */}}
+ {{ if .Enum }}
+// for additional props
+var {{ camelize .Name }}ValueEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" . }}
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize .Name }}ValueEnum = append({{ camelize .Name }}ValueEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if .IsExported }}{{ pascalize .Name }}{{ else }}{{ .Name }}{{ end }}) validate{{ pascalize .Name }}ValueEnum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" . }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize .Name }}ValueEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{- end }}
+ {{ end }}
+// Validate validates this {{ humanize .Name }}
+func ({{.ReceiverName }} {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ if .Discriminates }}{{ camelize .Name }}{{ else if .IsExported }}{{ pascalize .Name }}{{ else }}{{ .Name }}{{ end }}) Validate(formats strfmt.Registry) error {{"{"}}{{/* collects all validation errors into res, then wraps them in a CompositeError */}}
+ var res []error
+ {{ template "minmaxProperties" .}}
+ {{ range .AllOf }}
+ {{- if not .Properties }}
+ // validation for a type composition with {{ .GoType }}
+ {{- end }}
+ {{- if and (or .IsInterface .IsAnonymous .IsBaseType) (or .Required .HasValidations) }}{{/* anonymous/interface composition parts are validated inline */}}
+ {{ template "fieldvalidator" . }}
+
+ {{ range .Properties }}
+ {{ if and (ne $.DiscriminatorField .Name) (or .Required .HasValidations) }}
+ if err := {{.ReceiverName }}.validate{{ pascalize .Name }}(formats); err != nil {
+ res = append(res, err)
+ }
+ {{- end }}
+ {{- end }}
+ {{- if and .HasAdditionalProperties (not .IsMap) }}{{/* validates additionalProperties in an object which is not itself a map */}}
+ {{ template "mapvalidator" . }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}Items(formats); err != nil {
+ res = append(res, err)
+ }
+ {{ end }}
+ {{- else if (or .Required .HasValidations) }}{{/* named composition part: delegate to its own Validate */}}
+ if err := {{ .ReceiverName }}.{{ pascalize (dropPackage .GoType) }}.Validate(formats); err != nil {
+ res = append(res, err)
+ }
+ {{- end }}
+ {{- end }}
+ {{ template "fieldvalidator" . }}
+ {{ range .Properties }}
+ {{ if and (ne $.DiscriminatorField .Name) (or .Required .HasValidations) }}
+ if err := {{.ReceiverName }}.validate{{ pascalize .Name }}(formats); err != nil {
+ res = append(res, err)
+ }
+ {{ end }}
+ {{ end }}
+ {{- if and .HasAdditionalProperties (not .IsMap) }}{{/* validates additionalProperties in an object which is not itself a map */}}
+ {{ template "mapvalidator" . }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}{{/* validates additionalItems in a tuple */}}
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}Items(formats); err != nil {
+ res = append(res, err)
+ }
+ {{ end }}
+ {{ if and .Enum (not .IsPrimitive) (not .IsMap) }}
+ // value enum
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}Enum("", "body", {{ .ReceiverName }}); err != nil {
+ res = append(res, err)
+ }
+ {{ end }}
+
+ if len(res) > 0 {
+ return errors.CompositeValidationError(res...)
+ }
+ return nil
+}
+
+ {{ range .Properties }}{{/* per own-property enum tables and enum checkers */}}
+ {{ if or .Required .HasValidations }}
+ {{ if .Enum }}
+var {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" . }}
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum = append({{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, v)
+ }
+}
+
+ {{ if (eq .SwaggerType "string") }}
+ {{ $gotype := .GoType }}
+ {{ $propname := .Name }}
+const (
+ {{ range .Enum }}
+ {{- $variant := print (pascalize $.Name) (pascalize $propname) (pascalize (cleanupEnumVariant .)) }}
+ // {{ $variant }} captures enum value {{ printf "%q" . }}
+ {{ $variant }} {{ $gotype }} = {{ printf "%q" . }}
+ {{ end }}
+)
+ {{ end }}
+
+// prop value enum
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}Enum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" . }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ if .ItemsEnum }}{{/* enum constraint on this property's array items */}}
+var {{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum []interface{}
+func init() {
+ var res []{{ template "dereffedSchemaType" .Items }}
+ if err := json.Unmarshal([]byte(`{{ json .ItemsEnum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum = append({{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}ItemsEnum(path, location string, value {{ if or .Items.IsTuple .Items.IsComplexObject .Items.IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" .Items }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ if .AdditionalItems }}
+ {{ if .AdditionalItems.Enum }}{{/* NOTE(review): this var name is identical to the property-enum var above — if a property has both .Enum and .AdditionalItems.Enum the generated file declares it twice; confirm upstream */}}
+var {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" .AdditionalItems }}
+ if err := json.Unmarshal([]byte(`{{ json .AdditionalItems.Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum = append({{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}Enum(path, location string, value {{ if or .AdditionalItems.IsTuple .AdditionalItems.IsComplexObject .AdditionalItems.IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" .AdditionalItems }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ end }}
+ {{ with .AdditionalProperties }}{{/* enum constraint on this property's additionalProperties values */}}
+ {{ if .Enum }}
+// additional properties value enum
+var {{ camelize $.Name }}{{ pascalize .Name }}ValueEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" . }}
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}{{ pascalize .Name }}ValueEnum = append({{ camelize $.Name }}{{ pascalize .Name }}ValueEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}ValueEnum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" . }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}{{ pascalize .Name }}ValueEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ end }}
+
+ {{ if and (ne $.DiscriminatorField .Name) (or .Required .HasValidations) }}{{/* emits the per-property validateXxx method; the discriminator field is skipped */}}
+func ({{.ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}(formats strfmt.Registry) error {
+ {{- if not .Required }}
+ {{- if .IsInterface }}
+ if {{ .ValueExpression }} == nil { // not required{{/* FIX: was the literal text ".ValueExpression" (missing template braces), which emitted non-compiling Go; cf. the identical AllOf branch further down */}}
+ {{- else }}
+ if swag.IsZero({{ .ValueExpression }}) { // not required
+ {{- end }}
+ return nil
+ }
+ {{- end }}
+ {{- if and $.IsTuple .IsMap .Required }}{{/* required map member of a tuple: explicit nil/Required check */}}
+ {{- if .IsInterface }}
+ if {{ .ValueExpression }} == nil {
+ return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }})
+ }
+ {{- else }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}(
+ {{- if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }},
+ {{- if and (eq .GoType "string") (not (or .IsAnonymous .IsNullable)) }}{{ .GoType }}({{ end }}
+ {{- .ValueExpression }}
+ {{- if and (eq .GoType "string") (not (or .IsAnonymous .IsNullable)) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+ {{template "propertyvalidator" . }}
+
+ return nil
+}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ range .AllOf }}{{/* same enum tables and validateXxx methods, for properties contributed by AllOf parts */}}
+ {{ range .Properties }}
+ {{ if and (ne $.DiscriminatorField .Name) (or .Required .HasValidations) }}
+ {{ if .Enum }}
+var {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" . }}
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum = append({{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, v)
+ }
+}
+
+// property enum
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}Enum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" . }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ if .ItemsEnum }}
+var {{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" .Items }}
+ if err := json.Unmarshal([]byte(`{{ json .ItemsEnum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum = append({{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}ItemsEnum(path, location string, value {{ if or .Items.IsTuple .Items.IsComplexObject .Items.IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" .Items }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ if .AdditionalItems }}
+ {{ if .AdditionalItems.Enum }}{{/* NOTE(review): same var name as the property enum above — duplicate declaration if both enums are present; confirm upstream */}}
+var {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" .AdditionalItems }}
+ if err := json.Unmarshal([]byte(`{{ json .AdditionalItems.Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum = append({{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}Enum(path, location string, value {{ if or .AdditionalItems.IsTuple .AdditionalItems.IsComplexObject .AdditionalItems.IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" .AdditionalItems }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ end }}
+ {{ with .AdditionalProperties }}
+ {{ if .Enum }}
+var {{ camelize $.Name }}{{ pascalize .Name }}ValueEnum []interface{}
+func init() {
+ var res []{{ template "dereffedSchemaType" . }}
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}{{ pascalize .Name }}ValueEnum = append({{ camelize $.Name }}{{ pascalize .Name }}ValueEnum, v)
+ }
+}
+
+// additional properties value enum
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}ValueEnum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" . }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}{{ pascalize .Name }}ValueEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ end }}
+
+
+func ({{.ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}(formats strfmt.Registry) error {
+ {{ if not .Required }}
+ {{- if .IsInterface }}
+ if {{ .ValueExpression }} == nil { // not required
+ {{- else }}
+ if swag.IsZero({{ .ValueExpression }}) { // not required
+ {{- end }}
+ return nil
+ }
+ {{ end }}
+ {{template "propertyvalidator" . }}
+
+ return nil
+}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+
+ {{ if .HasAdditionalItems }}{{/* tuple additionalItems: enum table plus the validateXxxItems method */}}
+ {{ if .AdditionalItems.Enum }}{{/* NOTE(review): var name matches the schema-level ItemsEnum var emitted earlier — possible duplicate declaration when both exist; confirm upstream */}}
+var {{ camelize .Name }}ItemsEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" .AdditionalItems }}
+ if err := json.Unmarshal([]byte(`{{ json .AdditionalItems.Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize .Name }}ItemsEnum = append({{ camelize .Name }}ItemsEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}ItemsEnum(path, location string, value {{ if or .AdditionalItems.IsTuple .AdditionalItems.IsComplexObject .AdditionalItems.IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" .AdditionalItems }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize .Name }}ItemsEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+func ({{.ReceiverName }} *{{ pascalize .Name }}) validate{{ pascalize .Name }}Items(formats strfmt.Registry) error {
+ {{ if and (or .AdditionalItems.Required .AdditionalItems.HasValidations) (not .AdditionalItems.SkipExternalValidation) }}
+ for {{ .IndexVar }} := range {{ .ValueExpression }}.{{ pascalize .Name }}Items {
+ {{template "propertyvalidator" .AdditionalItems }}
+ }
+ {{ end }}
+ return nil
+}
+ {{ end }}
+{{ end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/additionalpropertiesserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/additionalpropertiesserializer.gotmpl
new file mode 100644
index 000000000..a09058683
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/additionalpropertiesserializer.gotmpl
@@ -0,0 +1,94 @@
+{{ define "additionalPropertiesSerializer" }}{{/* emits UnmarshalJSON/MarshalJSON for objects that carry both declared properties and an additionalProperties map */}}
+// UnmarshalJSON unmarshals this object with additional properties from JSON
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(data []byte) error {
+ // stage 1, bind the properties
+ var stage1 {{ template "withoutAdditionalBody" . }}
+ if err := json.Unmarshal(data, &stage1); err != nil {
+ return err
+ }
+ var rcv {{ pascalize .Name }}
+ {{ range .Properties }}
+ rcv.{{ pascalize .Name }} = stage1.{{ pascalize .Name }}
+ {{- end }}
+ *{{ .ReceiverName }} = rcv
+
+ // stage 2, remove properties and add to map
+ stage2 := make(map[string]{{ if .AdditionalProperties }}json.RawMessage{{ else }}interface{}{{ end }})
+ if err := json.Unmarshal(data, &stage2); err != nil {
+ return err
+ }
+
+ {{ range .Properties }}
+ delete(stage2, {{ printf "%q" .Name }})
+ {{- end }}
+
+ {{- if .AdditionalProperties }}{{/* typed additionalProperties: decode each leftover raw value into the declared type */}}
+ // stage 3, add additional properties values
+ if len(stage2) > 0 {
+ result := make(map[string]{{ template "schemaType" .AdditionalProperties }})
+ for k, v := range stage2 {
+ var toadd {{ template "schemaType" .AdditionalProperties }}
+ if err := json.Unmarshal(v, {{if not .AdditionalProperties.IsNullable }}&{{ end }}toadd); err != nil {
+ return err
+ }
+ result[k] = toadd
+ }
+ {{ .ValueExpression }} = result
+ }
+ {{- else }}{{/* untyped: keep the leftover map as interface{} values */}}
+ {{ .ValueExpression }} = stage2
+ {{- end }}
+
+ return nil
+}
+
+// MarshalJSON marshals this object with additional properties into a JSON object
+func ({{.ReceiverName}} {{ pascalize .Name }}) MarshalJSON() ([]byte, error) {
+ var stage1 {{ template "withoutAdditionalBody" . }}
+ {{ range .Properties }}
+ stage1.{{ pascalize .Name }} = {{ .ValueExpression }}
+ {{- end }}
+
+ // make JSON object for known properties
+ props, err := json.Marshal(stage1)
+ if err != nil {
+ return nil, err
+ }
+
+ if len({{ .ValueExpression }}) == 0 { // no additional properties
+ return props, nil
+ }
+
+ // make JSON object for the additional properties
+ additional, err := json.Marshal({{ .ValueExpression }})
+ if err != nil {
+ return nil, err
+ }
+
+ if len(props) < 3 { // "{}": only additional properties{{/* an empty JSON object marshals to exactly 2 bytes */}}
+ return additional, nil
+ }
+
+ // concatenate the 2 objects
+ return swag.ConcatJSON(props, additional), nil
+}
+{{- end }}
+
+{{ define "noAdditionalPropertiesSerializer" }}{{/* emits a strict UnmarshalJSON that rejects any field not declared in the schema */}}
+// UnmarshalJSON unmarshals this object while disallowing additional properties from JSON
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(data []byte) error {
+ var props {{ template "withoutAdditionalBody" . }}
+
+ dec := json.NewDecoder(bytes.NewReader(data))
+ dec.DisallowUnknownFields()
+ if err := dec.Decode(&props); err != nil {
+ return err
+ }
+
+ {{- $rcv := .ReceiverName }}{{/* NOTE(review): $rcv appears unused — the range below uses the property's own .ReceiverName; confirm before removing */}}
+ {{ range .Properties }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = props.{{ pascalize .Name }}
+ {{- end }}
+ return nil
+}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/aliasedserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/aliasedserializer.gotmpl
new file mode 100644
index 000000000..efdf2718a
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/aliasedserializer.gotmpl
@@ -0,0 +1,11 @@
+{{ define "aliasedSerializer" }}
+// UnmarshalJSON sets a {{ pascalize .Name }} value from JSON input
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(b []byte) error {
+ return ((*{{ .AliasedType }})({{ .ReceiverName}})).UnmarshalJSON(b)
+}
+
+// MarshalJSON retrieves a {{ pascalize .Name }} value as JSON output
+func ({{.ReceiverName}} {{ pascalize .Name }}) MarshalJSON() ([]byte, error) {
+ return ({{ .AliasedType }}({{ .ReceiverName}})).MarshalJSON()
+}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/allofserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/allofserializer.gotmpl
new file mode 100644
index 000000000..4359faa7f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/allofserializer.gotmpl
@@ -0,0 +1,180 @@
+{{ define "allOfSerializer" }}
+ {{- $receiverName := .ReceiverName }}
+// UnmarshalJSON unmarshals this object from a JSON structure
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(raw []byte) error {
+ {{- range .AllOf }}
+ // {{ pascalize .Name }}
+ {{- if and .IsAnonymous .Properties }}{{/* unmarshalling properties in all of anonymous objects */}}
+ {{- $part := pascalize .Name }}
+ var data{{ $part }} struct {
+ {{- range .Properties }}
+ {{- if not .IsBaseType }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{ else }}
+ {{ if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{ else }}
+ {{ pascalize .Name}} json.RawMessage `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end }}
+ {{ end }}
+ {{- end }}
+ {{- if .HasAdditionalProperties }}
+ {{ pascalize .AdditionalProperties.Name }}{{ if not .IsExported }}Field{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{- end }}
+ {{- if .AdditionalItems }}
+ {{ pascalize .AdditionalItems.Name }}{{ if or (not .IsExported) .IsSubType }}Field{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{- end }}
+ }
+ if err := swag.ReadJSON(raw, &data{{ $part }}); err != nil {
+ return err
+ }
+ {{ range .Properties }}
+ {{ $receiverName }}.{{ pascalize .Name }} = data{{ $part }}.{{ pascalize .Name }}
+ {{ end }}
+ {{- else if .IsAnonymous }}
+ var {{ varname .Name }} {{ .GoType }}
+ if err := {{ if .IsBaseType}}Unmarshal{{ .GoType }}(bytes.NewBuffer(raw), &{{ varname .Name }}){{ else }} swag.ReadJSON(raw, &{{ varname .Name }}){{ end }}; err != nil {
+ return err
+ }
+ {{ .ValueExpression }} = {{ varname .Name }}
+ {{- end }}
+ {{- if not .IsAnonymous }}{{/* unmarshalling allOf named objects */}}
+ var {{ varname .Name }} {{ .GoType }}
+ if err := {{ if .IsBaseType}}Unmarshal{{ .GoType }}(bytes.NewBuffer(raw), &{{ varname .Name }}){{ else }} swag.ReadJSON(raw, &{{ varname .Name }}){{ end }}; err != nil {
+ return err
+ }
+ {{ .ReceiverName }}.{{ dropPackage .GoType }} = {{ varname .Name }}
+ {{ end }}
+ {{ end }}
+ {{- if .Properties }}
+ // now for regular properties
+ {{- $part := pascalize .Name }}
+ var props{{ $part }} struct {
+ {{- range .Properties }}
+ {{- if not .IsBaseType }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{- else }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} json.RawMessage `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{- end }}
+ {{ end }}
+ }
+ if err := swag.ReadJSON(raw, &props{{ $part }}); err != nil {
+ return err
+ }
+ {{- range .Properties }}
+ {{ $receiverName }}.{{ pascalize .Name }} = props{{ $part }}.{{ pascalize .Name }}
+ {{ end }}
+ {{- end }}
+ {{ if .HasAdditionalProperties }}
+ // TODO: AdditionalProperties
+ {{- end }}
+ {{- if .AdditionalItems }}
+ // TODO: AdditionalItems
+ {{- end }}
+ return nil
+}
+
+// MarshalJSON marshals this object to a JSON structure
+func ({{.ReceiverName}} {{ pascalize .Name }}) MarshalJSON() ([]byte, error) {
+ _parts := make([][]byte, 0, {{ len .AllOf }})
+ {{ range .AllOf }}
+ {{- if and .IsAnonymous .Properties }}
+ {{- $part := pascalize .Name }}
+ var data{{ $part }} struct {
+ {{- range .Properties }}
+ {{- if not .IsBaseType }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{- else }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} json.RawMessage `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{- end }}
+ {{ end }}
+ {{- if .HasAdditionalProperties }}
+ {{ pascalize .AdditionalProperties.Name }}{{ if not .IsExported }}Field{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{- end }}
+ {{- if .AdditionalItems }}
+ {{ pascalize .AdditionalItems.Name }}{{ if or (not .IsExported) .IsSubType }}Field{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{- end }}
+ }
+
+ {{ range .Properties }}
+ data{{ $part }}.{{ pascalize .Name }} = {{ $receiverName }}.{{ pascalize .Name }}
+ {{ end }}
+
+ jsonData{{ $part }}, err{{ $part }} := swag.WriteJSON(data{{ $part }})
+ if err{{ $part }} != nil {
+ return nil, err{{ $part }}
+ }
+ _parts = append(_parts, jsonData{{ $part }})
+ {{- else if .IsAnonymous }}{{/* unmarshalling anonymous type composition */}}
+ {{ varname .Name }}, err := swag.WriteJSON({{ .ValueExpression }})
+ if err != nil {
+ return nil, err
+ }
+ _parts = append(_parts, {{ varname .Name }})
+ {{- end }}
+ {{- if not .IsAnonymous }}
+
+ {{ varname .Name }}, err := swag.WriteJSON({{ $receiverName }}.{{ dropPackage .GoType }})
+ if err != nil {
+ return nil, err
+ }
+ _parts = append(_parts, {{ varname .Name }})
+ {{- end }}
+ {{- end }}
+ {{- if .Properties }}
+
+ // now for regular properties
+ {{- $part := pascalize .Name }}
+ var props{{ $part }} struct {
+ {{- range .Properties }}
+ {{- if not .IsBaseType }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{- else }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} json.RawMessage `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{- end }}
+ {{ end }}
+ }
+ {{- range .Properties }}
+ props{{ $part }}.{{ pascalize .Name }} = {{ $receiverName }}.{{ pascalize .Name }}
+ {{ end }}
+ jsonDataProps{{ $part }}, err{{ $part }} := swag.WriteJSON(props{{ $part }})
+ if err{{ $part }} != nil {
+ return nil, err{{ $part }}
+ }
+ _parts = append(_parts, jsonDataProps{{ $part }})
+ {{- end }}
+ {{- if .HasAdditionalProperties }}
+ {{- end }}
+ {{- if .HasAdditionalItems }}
+ {{- end }}
+ return swag.ConcatJSON(_parts...), nil
+}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/basetypeserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/basetypeserializer.gotmpl
new file mode 100644
index 000000000..5a7e9f44c
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/basetypeserializer.gotmpl
@@ -0,0 +1,69 @@
+{{ define "polymorphicSerializer" }}
+// Unmarshal{{ pascalize .Name }}Slice unmarshals polymorphic slices of {{ pascalize .Name }}
+func Unmarshal{{ pascalize .Name }}Slice(reader io.Reader, consumer runtime.Consumer) ([]{{ pascalize .Name }}, error) {
+ var elements []json.RawMessage
+ if err := consumer.Consume(reader, &elements); err != nil {
+ return nil, err
+ }
+
+ var result []{{ pascalize .Name }}
+ for _, element := range elements {
+ obj, err := unmarshal{{ pascalize .Name }}(element, consumer)
+ if err != nil {
+ return nil, err
+ }
+ result = append(result, obj)
+ }
+ return result, nil
+}
+
+// Unmarshal{{ pascalize .Name }} unmarshals polymorphic {{ pascalize .Name }}
+func Unmarshal{{ pascalize .Name }}(reader io.Reader, consumer runtime.Consumer) ({{ pascalize .Name }}, error) {
+ // we need to read this twice, so first into a buffer
+ data, err := io.ReadAll(reader)
+ if err != nil {
+ return nil, err
+ }
+ return unmarshal{{ pascalize .Name }}(data, consumer)
+}
+
+func unmarshal{{ pascalize .Name }}(data []byte, consumer runtime.Consumer) ({{ pascalize .Name }}, error) {
+ buf := bytes.NewBuffer(data)
+ {{ if .Discriminates }} buf2 := bytes.NewBuffer(data) {{ end }}
+
+ // the first time this is read is to fetch the value of the {{ .DiscriminatorField }} property.
+ var getType struct { {{ pascalize .DiscriminatorField }} string `json:{{ printf "%q" .DiscriminatorField }}` }
+ if err := consumer.Consume(buf, &getType); err != nil {
+ return nil, err
+ }
+
+ if err := validate.RequiredString({{ printf "%q" .DiscriminatorField }}, "body", getType.{{ pascalize .DiscriminatorField }}); err != nil {
+ return nil, err
+ }
+
+ // The value of {{ .DiscriminatorField }} is used to determine which type to create and unmarshal the data into
+ switch getType.{{ pascalize .DiscriminatorField }} {
+ {{- range $k, $v := .Discriminates }}
+ case {{ printf "%q" $k }}:
+ var result {{ if eq (upper (pascalize $.Name)) (upper $v) }}{{ camelize $.Name }}{{ else }}{{ $v }}{{ end }}
+ if err := consumer.Consume(buf2, &result); err != nil {
+ return nil, err
+ }
+ return &result, nil
+ {{- end }}
+ }
+ return nil, errors.New(422, "invalid {{ .DiscriminatorField }} value: %q", getType.{{ pascalize .DiscriminatorField }})
+}
+{{- end }}
+
+{{ define "baseTypeSerializer" }}
+// Unmarshal{{ pascalize .Name }} unmarshals polymorphic {{ pascalize .Name }}
+func Unmarshal{{ pascalize .Name }}(reader io.Reader, consumer runtime.Consumer) ({{ pascalize .Name }}, error) {
+ return Unmarshal{{ pascalize .GoType }}(reader, consumer)
+}
+
+// Unmarshal{{ pascalize .Name }}Slice unmarshals polymorphic slices of {{ pascalize .Name }}
+func Unmarshal{{ pascalize .Name }}Slice(reader io.Reader, consumer runtime.Consumer) ([]{{ pascalize .Name }}, error) {
+ return Unmarshal{{ pascalize .GoType }}Slice(reader, consumer)
+}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/marshalbinaryserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/marshalbinaryserializer.gotmpl
new file mode 100644
index 000000000..17c36cd06
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/marshalbinaryserializer.gotmpl
@@ -0,0 +1,19 @@
+{{ define "marshalBinarySerializer" }}
+// MarshalBinary interface implementation
+func ({{.ReceiverName}} *{{ pascalize .Name }}) MarshalBinary() ([]byte, error) {
+ if {{ .ReceiverName }} == nil {
+ return nil, nil
+ }
+ return swag.WriteJSON({{ .ReceiverName }})
+}
+
+// UnmarshalBinary interface implementation
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalBinary(b []byte) error {
+ var res {{ pascalize .Name }}
+ if err := swag.ReadJSON(b, &res); err != nil {
+ return err
+ }
+ *{{ .ReceiverName }} = res
+ return nil
+}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/schemaserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/schemaserializer.gotmpl
new file mode 100644
index 000000000..76d814779
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/schemaserializer.gotmpl
@@ -0,0 +1,15 @@
+{{ define "schemaSerializer" }}{{/* switches to the appropriate serializer for any given type */}}
+ {{- if and .IsSubType (not .HasBaseType) }}
+ {{ template "hasDiscriminatedSerializer" . }}
+ {{- else if .IsTuple }}
+ {{ template "tupleSerializer" . }}
+ {{- else if .HasBaseType }}
+ {{ template "hasDiscriminatedSerializer" . }}
+ {{- else if .IsAdditionalProperties }}
+ {{ template "additionalPropertiesSerializer" . }}
+ {{- else if and (gt (len .AllOf) 0) (not .IsSubType ) }}
+ {{ template "allOfSerializer" . }}
+ {{- else if and .IsComplexObject .StrictAdditionalProperties }}
+ {{ template "noAdditionalPropertiesSerializer" . }}
+ {{- end }}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/subtypeserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/subtypeserializer.gotmpl
new file mode 100644
index 000000000..b15613efc
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/subtypeserializer.gotmpl
@@ -0,0 +1,172 @@
+{{ define "hasDiscriminatedSerializer" }}
+// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(raw []byte) error {
+ var data {{ template "withoutBaseTypeBody" . }}
+ buf := bytes.NewBuffer(raw)
+ dec := json.NewDecoder(buf)
+ dec.UseNumber()
+
+ if err := dec.Decode(&data); err != nil {
+ return err
+ }
+ {{ if or .IsBaseType .IsSubType }}
+ var base {{ template "JustBaseTypeBody" . }}
+ buf = bytes.NewBuffer(raw)
+ dec = json.NewDecoder(buf)
+ dec.UseNumber()
+
+ if err := dec.Decode(&base); err != nil {
+ return err
+ }
+ {{- end }}
+
+ {{ range .AllOf }}
+ {{- if not .IsBaseType }}
+ {{ range .Properties }}
+ {{- if or .IsBaseType (not .IsExported) }}
+ {{- if not .Required }}
+ var allOf{{ pascalize .Name }} {{ if .IsArray }}[]{{ pascalize .Items.GoType }}{{ else }}{{ pascalize .GoType }}{{ end }}
+ if string(data.{{ pascalize .Name }}) != "null" {
+ {{ camelize .Name }}, err := Unmarshal{{ if .IsArray }}{{ pascalize .Items.GoType }}Slice{{ else }}{{ pascalize .GoType }}{{ end }}(bytes.NewBuffer(data.{{ pascalize .Name }}), runtime.JSONConsumer())
+ if err != nil && err != io.EOF {
+ return err
+ }
+ allOf{{ pascalize .Name }} = {{ camelize .Name }}
+ }
+ {{- else }}
+ allOf{{ pascalize .Name }}, err := Unmarshal{{ if .IsArray }}{{ pascalize .Items.GoType }}Slice{{ else }}{{ pascalize .GoType }}{{ end }}(bytes.NewBuffer(data.{{ pascalize .Name }}), runtime.JSONConsumer())
+ if err != nil && err != io.EOF {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{- if or .IsBaseType (not .IsExported) }}
+ {{- if not .Required }}
+ var prop{{ pascalize .Name }} {{ if .IsArray }}[]{{ pascalize .Items.GoType }}{{ else }}{{ pascalize .GoType }}{{ end }}
+ if string(data.{{ pascalize .Name }}) != "null" {
+ {{ camelize .Name }}, err := Unmarshal{{ if .IsArray }}{{ pascalize .Items.GoType }}Slice{{ else }}{{ pascalize .GoType }}{{ end }}(bytes.NewBuffer(data.{{ pascalize .Name }}), runtime.JSONConsumer())
+ if err != nil && err != io.EOF {
+ return err
+ }
+ prop{{ pascalize .Name }} = {{ camelize .Name }}
+ }
+ {{- else }}
+ prop{{ pascalize .Name }}, err := Unmarshal{{ if .IsArray }}{{ pascalize .Items.GoType }}Slice{{ else }}{{ pascalize .GoType }}{{ end }}(bytes.NewBuffer(data.{{ pascalize .Name }}), runtime.JSONConsumer())
+ if err != nil && err != io.EOF {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+
+ var result {{ pascalize .Name }}
+ {{ range $_, $parent := .AllOf }}
+ {{- if $parent.IsAnonymous }}
+ {{- if $parent.IsBaseType }}
+ {{ range $idx, $val := $parent.Properties }}
+ {{- if ne $parent.DiscriminatorField $val.Name }}
+ {{- if $val.IsExported }}
+ result.{{ camelize $val.Name }}Field = base.{{ pascalize $val.Name }}
+ {{- else }}
+ result.{{ camelize $val.Name }}Field = allOf{{ pascalize $val.Name }}
+ {{- end }}
+ {{- else }}
+ if base.{{ pascalize $val.Name }} != result.{{ pascalize $val.Name }}() {
+ /* Not the type we're looking for. */
+ return errors.New(422, "invalid {{$val.Name}} value: %q", base.{{ pascalize $val.Name }})
+ }
+ {{- end }}
+ {{- end }}
+ {{- else }}
+ {{ range $idx, $val := $parent.Properties }}
+ {{- if $val.IsBaseType }}
+ result.{{ camelize $val.Name }}Field = allOf{{ pascalize $val.Name }}
+ {{- else }}
+ result.{{ pascalize $val.Name }} = data.{{ pascalize $val.Name }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- else }}
+ {{- if and $parent.IsBaseType $parent.IsExported }}
+ {{ range $idx, $val := $parent.Properties }}
+ {{- if ne $parent.DiscriminatorField $val.Name }}
+ {{- if $val.IsExported }}
+ result.{{ camelize $val.Name }}Field = base.{{ pascalize $val.Name }}
+ {{ else }}
+ result.{{ camelize $val.Name }}Field = allOf{{ pascalize $val.Name }}
+ {{- end }}
+ {{- else }}
+ if base.{{ pascalize $val.Name }} != result.{{ pascalize $val.Name }}() {
+ /* Not the type we're looking for. */
+ return errors.New(422, "invalid {{$val.Name}} value: %q", base.{{ pascalize $val.Name }})
+ }
+ {{- end }}
+ {{- end }}
+ {{- else }}
+ result.{{ $parent.GoType }} = data.{{ $parent.GoType }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{ range .Properties }}
+ // {{ .Name }}
+ result.{{ if .IsBaseType }}{{ camelize .Name }}Field{{ else }}{{ pascalize .Name }}{{ end }} = {{ if .IsBaseType }}prop{{ pascalize .Name }}{{ else }}data.{{ pascalize .Name}}{{ end }}
+ {{ end }}
+ *{{ .ReceiverName }} = result
+
+ {{ if .IsAdditionalProperties }}
+ // Additional Properties: read raw, remove named properties, and add to map
+ rawProps := make(map[string]{{ if .AdditionalProperties }}json.RawMessage{{ else }}interface{}{{ end }})
+ if err := json.Unmarshal(raw, &rawProps); err != nil {
+ return err
+ }
+ {{ range .Properties }}
+ delete(rawProps, {{ printf "%q" .Name }})
+ {{- end }}
+ {{ if .AdditionalProperties }}
+ if len(rawProps) > 0 {
+ {{ .ValueExpression }} = make(map[string]{{ template "schemaType" .AdditionalProperties }})
+ for k, v := range rawProps {
+ var toadd {{ template "schemaType" .AdditionalProperties }}
+ if err := json.Unmarshal(v, {{if not .AdditionalProperties.IsNullable }}&{{ end }}toadd); err != nil {
+ return err
+ }
+ {{ .ValueExpression }}[k] = toadd
+ }
+ }
+ {{- else }}
+ {{ .ValueExpression }} = rawProps
+ {{- end }}
+ {{- end }}
+
+ return nil
+}
+
+// MarshalJSON marshals this object with a polymorphic type to a JSON structure
+func ({{.ReceiverName}} {{ pascalize .Name }}) MarshalJSON() ([]byte, error) { {{ $receiverName := .ReceiverName }}
+ var b1, b2, b3 []byte
+ var err error
+ b1, err = json.Marshal({{ template "withoutBaseTypeBodyOrNonExported" . }})
+ if err != nil {
+ return nil, err
+ }
+ b2, err = json.Marshal({{ template "withBaseTypeBodyAndNonExported" . }})
+ if err != nil {
+ return nil, err
+ }
+ {{ if .IsAdditionalProperties }}
+ if len({{ .ValueExpression }}) > 0 {
+ // make JSON object for the additional properties
+ b3, err = json.Marshal({{ .ValueExpression }})
+ if err != nil {
+ return nil, err
+ }
+ }
+ {{- end }}
+
+ return swag.ConcatJSON(b1, b2, b3), nil
+}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/tupleserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/tupleserializer.gotmpl
new file mode 100644
index 000000000..c05e844bb
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/tupleserializer.gotmpl
@@ -0,0 +1,66 @@
+{{ define "tupleSerializer" }}
+// UnmarshalJSON unmarshals this tuple type from a JSON array
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(raw []byte) error {
+ // stage 1, get the array but just the array
+ var stage1 []json.RawMessage
+ buf := bytes.NewBuffer(raw)
+ dec := json.NewDecoder(buf)
+ dec.UseNumber()
+
+ if err := dec.Decode(&stage1); err != nil {
+ return err
+ }
+
+ // stage 2: hydrates struct members with tuple elements
+ {{- if .AdditionalItems }}
+ var lastIndex int
+ {{ end }}
+ {{ range $idx, $val := .Properties }}if len(stage1) > {{ $idx }} {
+ var data{{ pascalize .Name }} {{ template "dereffedSchemaType" . }}
+ buf = bytes.NewBuffer(stage1[{{ $idx }}])
+ dec := json.NewDecoder(buf)
+ dec.UseNumber()
+ if err := dec.Decode(&data{{ pascalize .Name }}); err != nil {
+ return err
+ }
+ {{ .ReceiverName }}.{{ if .IsExported }}{{ pascalize .Name }}{{ else }}{{ camelize .Name }}{{ end }} = {{ if .IsNullable }}&{{ end }}data{{ pascalize .Name }}
+ {{ if $.AdditionalItems }}
+ lastIndex = {{ $idx }}
+ {{ end }}
+ }
+ {{ end }}
+ {{ if .AdditionalItems }}
+ // stage 3: hydrates AdditionalItems
+ if len(stage1) > lastIndex+1 {
+ for _, val := range stage1[lastIndex+1:] {
+ var toadd {{ template "schemaType" .AdditionalItems }}
+ buf = bytes.NewBuffer(val)
+ dec := json.NewDecoder(buf)
+ dec.UseNumber()
+ if err := dec.Decode({{ if not .AdditionalItems.IsNullable }}&{{ end }}toadd); err != nil {
+ return err
+ }
+ {{- with .AdditionalItems }}
+ {{ $.ValueExpression }}.{{- if .IsExported }}{{ pascalize .Name }}{{ else }}{{ camelize .Name }}{{ end }} = append({{ $.ValueExpression }}.{{- if .IsExported }}{{ pascalize .Name }}{{ else }}{{ camelize .Name }}{{ end }}, toadd)
+ {{- end }}
+ }
+ }
+ {{- end }}
+ return nil
+}
+
+// MarshalJSON marshals this tuple type into a JSON array
+func ({{.ReceiverName}} {{ pascalize .Name }}) MarshalJSON() ([]byte, error) {
+ data := []interface{}{
+ {{ range .Properties -}}
+ {{.ReceiverName}}.{{ pascalize .Name }},
+ {{- end }}
+ }
+ {{ with .AdditionalItems }}
+ for _, v := range {{ $.ValueExpression }}.{{ if .IsExported }}{{ pascalize .Name }}{{ else }}{{ camelize .Name }}{{ end }} {
+ data = append(data, v)
+ }
+ {{- end }}
+ return json.Marshal(data)
+}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/autoconfigureapi.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/autoconfigureapi.gotmpl
new file mode 100644
index 000000000..629b4b22b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/autoconfigureapi.gotmpl
@@ -0,0 +1,205 @@
+// Code generated by go-swagger; DO NOT EDIT.
+// Auto configures api handlers Implementations.
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .APIPackage }}
+
+import (
+ "context"
+ "crypto/tls"
+ "io"
+ "log"
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/runtime/middleware"
+ "github.com/go-openapi/runtime/security"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+{{ with .GenOpts }}
+//go:generate swagger generate server --target {{ .TargetPath }} --name {{ .Name }} --spec {{ .SpecPath }}
+{{- if .APIPackage }}{{ if ne .APIPackage "operations" }} --api-package {{ .APIPackage }}{{ end }}{{ end }}
+{{- if .ModelPackage }}{{ if ne .ModelPackage "models" }} --model-package {{ .ModelPackage }}{{ end }}{{ end }}
+{{- if .ServerPackage }}{{ if ne .ServerPackage "restapi"}} --server-package {{ .ServerPackage }}{{ end }}{{ end }}
+{{- if .ClientPackage }}{{ if ne .ClientPackage "client" }} --client-package {{ .ClientPackage }}{{ end }}{{ end }}
+{{- if .ImplementationPackage }} --implementation-package {{ .ImplementationPackage }}{{ end }}
+{{- if .TemplateDir }} --template-dir {{ .TemplateDir }}{{ end }}
+{{- range .Operations }} --operation {{ . }}{{ end }}
+{{- range .Tags }} --tags {{ . }}{{ end }}
+{{- if .Principal }} --principal {{ .Principal }}{{ end }}
+{{- if .DefaultScheme }}{{ if ne .DefaultScheme "http" }} --default-scheme {{ .DefaultScheme }}{{ end }}{{ end }}
+{{- range .Models }} --model {{ . }}{{ end }}
+{{- if or (not .IncludeModel) (not .IncludeValidator) }} --skip-models{{ end }}
+{{- if or (not .IncludeHandler) (not .IncludeParameters ) (not .IncludeResponses) }} --skip-operations{{ end }}
+{{- if not .IncludeSupport }} --skip-support{{ end }}
+{{- if not .IncludeMain }} --exclude-main{{ end }}
+{{- if .ExcludeSpec }} --exclude-spec{{ end }}
+{{- if .DumpData }} --dump-data{{ end }}
+{{- if .StrictResponders }} --strict-responders{{ end }}
+{{ end }}
+
+// This file auto configures the api backend implementation.
+// {{.ImplementationPackageAlias}} package must already exist.
+// {{.ImplementationPackageAlias}}.New() is implemented by user, and must return an object
+// or interface that implements Handler interface defined below.
+var Impl Handler = {{.ImplementationPackageAlias}}.New()
+
+// Handler handles all api server backend configurations and requests
+type Handler interface{ {{- /* composed from Configurable, optional Authable and one interface per operation group */}}
+{{- if .SecurityDefinitions }}
+	Authable
+{{- end }}
+	Configurable
+{{ range .OperationGroups -}}
+	{{ pascalize .Name }}Handler
+{{ end -}}
+}
+
+// Configurable handles all server configurations
+type Configurable interface {
+	ConfigureFlags(api *{{.APIPackageAlias}}.{{ pascalize .Name }}API)
+	ConfigureTLS(tlsConfig *tls.Config)
+	ConfigureServer(s *http.Server, scheme, addr string)
+	CustomConfigure(api *{{.APIPackageAlias}}.{{ pascalize .Name }}API)
+	SetupMiddlewares(handler http.Handler) http.Handler
+	SetupGlobalMiddleware(handler http.Handler) http.Handler
+}
+
+{{- if .SecurityDefinitions }}
+// Authable handles server authentication
+type Authable interface{ {{- /* one Auth method per security definition in the spec */}}
+	{{- range .SecurityDefinitions }}
+	{{- if .IsBasicAuth }}
+	// Applies when the Authorization header is set with the Basic scheme
+	{{ pascalize .ID }}Auth(user string, pass string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error)
+	{{- else if .IsAPIKeyAuth }}
+	// Applies when the "{{ .Name }}" {{ .Source }} is set
+	{{ pascalize .ID }}Auth(token string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error)
+	{{- else if .IsOAuth2 }}
+	{{ pascalize .ID }}Auth(token string, scopes []string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error)
+	{{- end }}
+	{{- end }}
+}
+{{- end }}
+
+{{- $package := .Package }}
+{{- $apipackagealias := .APIPackageAlias }}
+{{ range .OperationGroups -}}
+/* {{ pascalize .Name }}Handler {{ .Description }} */
+type {{ pascalize .Name }}Handler interface {
+{{ range .Operations -}}
+	{{ if .Summary -}}
+	/* {{ pascalize .Name }} {{ .Summary }} */
+	{{ else if .Description -}}
+	/* {{ pascalize .Name }} {{ .Description }} */
+	{{ end -}}
+	{{ pascalize .Name }}(params {{ if ne .Package $package }}{{ .PackageAlias }}{{ else }}{{- $apipackagealias }}{{ end }}.
+	{{- pascalize .Name }}Params {{- if .Authorized}}, principal {{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}{{end}})
+	{{- if $.GenOpts.StrictResponders }} {{.Package}}.{{ pascalize .Name }}Responder {{ else }} middleware.Responder {{ end }}
+{{ end -}}
+}
+{{ end }}
+
+func configureFlags(api *{{.APIPackageAlias}}.{{ pascalize .Name }}API) { {{- /* forwarded to the user implementation */}}
+	Impl.ConfigureFlags(api)
+}
+
+func configureAPI(api *{{.APIPackageAlias}}.{{ pascalize .Name }}API) http.Handler { {{- /* wires consumers, producers, auth handlers and operation handlers to the user-supplied Impl */}}
+
+	api.ServeError = errors.ServeError
+
+	api.UseSwaggerUI()
+
+	{{ range .Consumes }}
+	{{- if .Implementation }}
+	api.{{ pascalize .Name }}Consumer = {{ .Implementation }}
+	{{- else }}
+	api.{{ pascalize .Name }}Consumer = runtime.ConsumerFunc(func(r io.Reader, target interface{}) error {
+		return Impl.{{ pascalize .Name }}Consume(r, target)
+	})
+	{{- end }}
+	{{- end }}
+	{{ range .Produces }}
+	{{- if .Implementation }}
+	api.{{ pascalize .Name }}Producer = {{ .Implementation }}
+	{{- else }}
+	api.{{ pascalize .Name }}Producer = runtime.ProducerFunc(func(w io.Writer, data interface{}) error {
+		return Impl.{{ pascalize .Name }}Produce(w, data) {{- /* fix: was `target`, which is undefined in this closure (the parameter is `data`) and made generated code fail to compile */}}
+	})
+	{{- end }}
+	{{- end}}
+	{{ range .SecurityDefinitions }}
+	{{- if .IsBasicAuth }}
+	// Applies when the Authorization header is set with the Basic scheme
+	api.{{ pascalize .ID }}Auth = func(user string, pass string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+		return Impl.{{ pascalize .ID }}Auth(user, pass)
+	}
+
+	{{- else if .IsAPIKeyAuth }}
+	// Applies when the "{{ .Name }}" {{ .Source }} is set
+	api.{{ pascalize .ID }}Auth = func(token string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+		return Impl.{{ pascalize .ID }}Auth(token)
+	}
+	{{- else if .IsOAuth2 }}
+	api.{{ pascalize .ID }}Auth = func(token string, scopes []string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+		return Impl.{{ pascalize .ID }}Auth(token, scopes)
+	}
+	{{- end }}
+	{{- end }}
+
+	{{- $package := .Package }}
+	{{- $apipackagealias := .APIPackageAlias }}
+	{{ range .Operations }}
+	api.{{ if ne .Package $package }}{{pascalize .Package}}{{ end }}{{ pascalize .Name }}Handler =
+	{{- if ne .Package $package }}
+	{{- .PackageAlias }}.{{- pascalize .Name }}HandlerFunc(func(params {{ .PackageAlias }}.{{- pascalize .Name }}Params
+	{{- else }}
+	{{- $apipackagealias }}.{{- pascalize .Name }}HandlerFunc(func(params {{ $apipackagealias }}.{{- pascalize .Name }}Params
+	{{- end }}
+	{{- if .Authorized}}, principal {{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}{{end}})
+	{{- if $.GenOpts.StrictResponders }} {{.Package}}.{{ pascalize .Name }}Responder { {{ else }} middleware.Responder { {{ end }}
+		return Impl.{{ pascalize .Name }}(params {{- if .Authorized}}, principal {{ end }})
+	})
+	{{- end }}
+
+	api.PreServerShutdown = func() { }
+
+	api.ServerShutdown = func() { }
+
+	// CustomConfigure can override or add to configurations set above
+	Impl.CustomConfigure(api)
+
+	return setupGlobalMiddleware(api.Serve(setupMiddlewares))
+}
+
+// The TLS configuration before HTTPS server starts.
+func configureTLS(tlsConfig *tls.Config) { {{- /* forwarded to the user implementation */}}
+	// Make all necessary changes to the TLS configuration here.
+	Impl.ConfigureTLS(tlsConfig)
+}
+
+// As soon as server is initialized but not run yet, this function will be called.
+// If you need to modify a config, store server instance to stop it individually later, this is the place.
+// This function can be called multiple times, depending on the number of serving schemes.
+// scheme value will be set accordingly: "http", "https" or "unix".
+func configureServer(s *http.Server, scheme, addr string) { {{- /* forwarded to the user implementation */}}
+	Impl.ConfigureServer(s, scheme, addr)
+}
+
+// The middleware configuration is for the handler executors. These do not apply to the swagger.json document.
+// The middleware executes after routing but before authentication, binding and validation.
+func setupMiddlewares(handler http.Handler) http.Handler { {{- /* forwarded to the user implementation */}}
+	return Impl.SetupMiddlewares(handler)
+}
+
+// The middleware configuration happens before anything, this middleware also applies to serving the swagger.json document.
+// So this is a good place to plug in a panic handling middleware, logging and metrics.
+func setupGlobalMiddleware(handler http.Handler) http.Handler { {{- /* forwarded to the user implementation */}}
+	return Impl.SetupGlobalMiddleware(handler)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/builder.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/builder.gotmpl
new file mode 100644
index 000000000..fda11859a
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/builder.gotmpl
@@ -0,0 +1,446 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{.Package}}
+{{ $package := .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "net/http"
+ "strings"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/runtime/middleware"
+ "github.com/go-openapi/runtime/security"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// New{{ pascalize .Name }}API creates a new {{ pascalize .Name }} instance
+func New{{ pascalize .Name }}API(spec *loads.Document) *{{ pascalize .Name }}API {
+ return &{{ pascalize .Name }}API{
+ handlers: make(map[string]map[string]http.Handler),
+ formats: strfmt.Default,
+ defaultConsumes: "{{ .DefaultConsumes }}",
+ defaultProduces: "{{ .DefaultProduces }}",
+ customConsumers: make(map[string]runtime.Consumer),
+ customProducers: make(map[string]runtime.Producer),
+ PreServerShutdown: func() { },
+ ServerShutdown: func() { },
+ spec: spec,
+ useSwaggerUI: false,
+ ServeError: errors.ServeError,
+ BasicAuthenticator: security.BasicAuth,
+ APIKeyAuthenticator: security.APIKeyAuth,
+ BearerAuthenticator: security.BearerAuth,
+ {{ range .Consumes }}
+ {{- if .Implementation }}
+ {{ pascalize .Name }}Consumer: {{ .Implementation }},
+ {{- else }}
+ {{ pascalize .Name }}Consumer: runtime.ConsumerFunc(func(r io.Reader, target interface{}) error {
+ return errors.NotImplemented("{{.Name}} consumer has not yet been implemented")
+ }),
+ {{- end }}
+ {{- end }}
+ {{ range .Produces }}
+ {{- if .Implementation }}
+ {{ pascalize .Name }}Producer: {{ .Implementation }},
+ {{- else }}
+ {{ pascalize .Name }}Producer: runtime.ProducerFunc(func(w io.Writer, data interface{}) error {
+ return errors.NotImplemented("{{.Name}} producer has not yet been implemented")
+ }),
+ {{- end }}
+ {{- end }}
+ {{ range .Operations }}
+ {{ if ne .Package $package }}{{ pascalize .Package }}{{ end }}{{ pascalize .Name }}Handler:
+ {{- if ne .Package $package }}{{ .PackageAlias }}.{{ end }}{{ pascalize .Name }}HandlerFunc(func(params {{ if ne .Package $package }}{{ .PackageAlias }}.{{end }}
+ {{- if $.GenOpts.StrictResponders}}
+ {{- pascalize .Name }}Params{{if .Authorized}}, principal {{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}{{end}}) {{if ne .Package $package }}{{ .Package }}.{{ end }}{{ pascalize .Name }}Responder {
+ return {{if ne .Package $package }}{{ .Package }}.{{ end }}{{ pascalize .Name }}NotImplemented()
+ {{else}}
+ {{- pascalize .Name }}Params{{if .Authorized}}, principal {{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}{{end}}) middleware.Responder {
+ return middleware.NotImplemented("operation {{ if ne .Package $package }}{{ .Package }}.{{ end }}{{pascalize .Name}} has not yet been implemented")
+ {{ end -}}
+ }),
+ {{- end }}
+ {{ range .SecurityDefinitions }}
+ {{- if .IsBasicAuth }}
+ // Applies when the Authorization header is set with the Basic scheme
+ {{ pascalize .ID }}Auth: func(user string, pass string) ({{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return nil, errors.NotImplemented("basic auth ({{ .ID }}) has not yet been implemented")
+ },
+ {{- end }}
+ {{- if .IsAPIKeyAuth }}
+ // Applies when the "{{ .Name }}" {{ .Source }} is set
+ {{ pascalize .ID }}Auth: func(token string) ({{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return nil, errors.NotImplemented("api key auth ({{ .ID }}) {{.Name}} from {{.Source}} param [{{ .Name }}] has not yet been implemented")
+ },
+ {{- end }}
+ {{- if .IsOAuth2 }}
+ {{ pascalize .ID }}Auth: func(token string, scopes []string) ({{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return nil, errors.NotImplemented("oauth2 bearer auth ({{ .ID }}) has not yet been implemented")
+ },
+ {{- end }}
+ {{- end }}
+ {{- if .SecurityDefinitions }}
+ // default authorizer is authorized meaning no requests are blocked
+ APIAuthorizer: security.Authorized(),
+ {{- end }}
+ }
+}
+
+/*{{ pascalize .Name }}API {{ if .Info }}{{ if .Info.Description }}{{.Info.Description}}{{ else }}the {{ humanize .Name }} API{{ end }}{{ end }} */
+type {{ pascalize .Name }}API struct {
+ spec *loads.Document
+ context *middleware.Context
+ handlers map[string]map[string]http.Handler
+ formats strfmt.Registry
+ customConsumers map[string]runtime.Consumer
+ customProducers map[string]runtime.Producer
+ defaultConsumes string
+ defaultProduces string
+ Middleware func(middleware.Builder) http.Handler
+ useSwaggerUI bool
+
+ // BasicAuthenticator generates a runtime.Authenticator from the supplied basic auth function.
+ // It has a default implementation in the security package, however you can replace it for your particular usage.
+ BasicAuthenticator func(security.UserPassAuthentication) runtime.Authenticator
+
+ // APIKeyAuthenticator generates a runtime.Authenticator from the supplied token auth function.
+ // It has a default implementation in the security package, however you can replace it for your particular usage.
+ APIKeyAuthenticator func(string, string, security.TokenAuthentication) runtime.Authenticator
+
+ // BearerAuthenticator generates a runtime.Authenticator from the supplied bearer token auth function.
+ // It has a default implementation in the security package, however you can replace it for your particular usage.
+ BearerAuthenticator func(string, security.ScopedTokenAuthentication) runtime.Authenticator
+ {{ range .Consumes }}
+ // {{ pascalize .Name }}Consumer registers a consumer for the following mime types:
+ {{- range .AllSerializers }}
+ // - {{ .MediaType }}
+ {{- end }}
+ {{ pascalize .Name }}Consumer runtime.Consumer
+ {{- end }}
+ {{ range .Produces}}
+ // {{ pascalize .Name }}Producer registers a producer for the following mime types:
+ {{- range .AllSerializers }}
+ // - {{ .MediaType }}
+ {{- end }}
+ {{ pascalize .Name }}Producer runtime.Producer
+ {{- end }}
+ {{ range .SecurityDefinitions}}
+ {{- if .IsBasicAuth}}
+
+ // {{ pascalize .ID }}Auth registers a function that takes username and password and returns a principal
+ // it performs authentication with basic auth
+ {{ pascalize .ID }}Auth func(string, string) ({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}, error)
+ {{- end }}
+ {{- if .IsAPIKeyAuth}}
+
+ // {{ pascalize .ID }}Auth registers a function that takes a token and returns a principal
+ // it performs authentication based on an api key {{ .Name }} provided in the {{.Source}}
+ {{ pascalize .ID }}Auth func(string) ({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}, error)
+ {{- end }}
+ {{- if .IsOAuth2 }}
+
+ // {{ pascalize .ID }}Auth registers a function that takes an access token and a collection of required scopes and returns a principal
+ // it performs authentication based on an oauth2 bearer token provided in the request
+ {{ pascalize .ID }}Auth func(string, []string) ({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}, error)
+ {{- end }}
+ {{- end }}
+ {{- if .SecurityDefinitions }}
+
+ // APIAuthorizer provides access control (ACL/RBAC/ABAC) by providing access to the request and authenticated principal
+ APIAuthorizer runtime.Authorizer
+ {{- end }}
+ {{- $package := .Package }}
+ {{ range .Operations }}
+ // {{ if ne .Package $package }}{{ pascalize .Package }}{{ end }}{{ pascalize .Name }}Handler sets the operation handler for the {{ humanize .Name }} operation
+ {{ if ne .Package $package }}{{ pascalize .Package }}{{ end }}{{ pascalize .Name }}Handler {{ if ne .Package $package }}{{ .PackageAlias }}.{{ end }}{{ pascalize .Name }}Handler
+ {{- end }}
+
+ // ServeError is called when an error is received, there is a default handler
+ // but you can set your own with this
+ ServeError func(http.ResponseWriter, *http.Request, error)
+
+ // PreServerShutdown is called before the HTTP(S) server is shutdown
+ // This allows for custom functions to get executed before the HTTP(S) server stops accepting traffic
+ PreServerShutdown func()
+
+ // ServerShutdown is called when the HTTP(S) server is shut down and done
+ // handling all active connections and does not accept connections any more
+ ServerShutdown func()
+
+ // Custom command line argument groups with their descriptions
+ CommandLineOptionsGroups []swag.CommandLineOptionsGroup
+
+ // User defined logger function.
+ Logger func(string, ...interface{})
+}
+
+// UseRedoc for documentation at /docs
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) UseRedoc() {
+ {{.ReceiverName}}.useSwaggerUI = false
+}
+
+// UseSwaggerUI for documentation at /docs
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) UseSwaggerUI() {
+ {{.ReceiverName}}.useSwaggerUI = true
+}
+
+// SetDefaultProduces sets the default produces media type
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) SetDefaultProduces(mediaType string) {
+ {{.ReceiverName}}.defaultProduces = mediaType
+}
+
+// SetDefaultConsumes returns the default consumes media type
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) SetDefaultConsumes(mediaType string) {
+ {{.ReceiverName}}.defaultConsumes = mediaType
+}
+
+// SetSpec sets a spec that will be served for the clients.
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) SetSpec(spec *loads.Document) {
+ {{.ReceiverName}}.spec = spec
+}
+
+// DefaultProduces returns the default produces media type
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) DefaultProduces() string {
+ return {{.ReceiverName}}.defaultProduces
+}
+
+// DefaultConsumes returns the default consumes media type
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) DefaultConsumes() string {
+ return {{.ReceiverName}}.defaultConsumes
+}
+
+// Formats returns the registered string formats
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) Formats() strfmt.Registry {
+ return {{.ReceiverName}}.formats
+}
+
+// RegisterFormat registers a custom format validator
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) RegisterFormat(name string, format strfmt.Format, validator strfmt.Validator) {
+ {{.ReceiverName}}.formats.Add(name, format, validator)
+}
+
+// Validate validates the registrations in the {{ pascalize .Name }}API
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) Validate() error {
+ var unregistered []string
+ {{ range .Consumes }}
+ if {{.ReceiverName}}.{{ pascalize .Name }}Consumer == nil {
+ unregistered = append(unregistered, "{{ pascalize .Name }}Consumer")
+ }
+ {{- end }}
+ {{ range .Produces }}
+ if {{.ReceiverName}}.{{ pascalize .Name }}Producer == nil {
+ unregistered = append(unregistered, "{{ pascalize .Name }}Producer")
+ }
+ {{- end }}
+ {{ range .SecurityDefinitions }}
+ if {{.ReceiverName}}.{{ pascalize .ID }}Auth == nil {
+ unregistered = append(unregistered, "{{if .IsAPIKeyAuth }}{{ pascalize .Name }}{{ else }}{{ pascalize .ID }}{{ end }}Auth")
+ }
+ {{- end }}
+ {{ range .Operations }}
+ if {{.ReceiverName}}.{{ if ne .Package $package }}{{ pascalize .Package }}{{ end }}{{ pascalize .Name }}Handler == nil {
+ unregistered = append(unregistered, "{{ if ne .Package $package }}{{ .Package }}.{{ end }}{{ pascalize .Name }}Handler")
+ }
+ {{- end }}
+
+ if len(unregistered) > 0 {
+ return fmt.Errorf("missing registration: %s", strings.Join(unregistered, ", "))
+ }
+
+ return nil
+}
+// ServeErrorFor gets a error handler for a given operation id
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) ServeErrorFor(operationID string) func(http.ResponseWriter, *http.Request, error) {
+ return {{.ReceiverName}}.ServeError
+}
+// AuthenticatorsFor gets the authenticators for the specified security schemes
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) AuthenticatorsFor(schemes map[string]spec.SecurityScheme) map[string]runtime.Authenticator {
+ {{- if .SecurityDefinitions }}
+ result := make(map[string]runtime.Authenticator)
+ for name := range schemes {
+ switch name {
+ {{- range .SecurityDefinitions }}
+ case "{{.ID}}":
+ {{- if .IsBasicAuth }}
+ result[name] = {{.ReceiverName}}.BasicAuthenticator({{ if not ( eq .Principal "interface{}" ) }}func(username, password string) (interface{}, error) {
+ return {{ end }}{{.ReceiverName}}.{{ pascalize .ID }}Auth{{ if not ( eq .Principal "interface{}" ) }}(username, password)
+ }{{ end }})
+ {{- end }}
+ {{- if .IsAPIKeyAuth }}
+ scheme := schemes[name]
+ result[name] = {{.ReceiverName}}.APIKeyAuthenticator(scheme.Name, scheme.In, {{ if not ( eq .Principal "interface{}" ) }}func(token string) (interface{}, error) {
+ return {{ end }}{{.ReceiverName}}.{{ pascalize .ID }}Auth{{ if not ( eq .Principal "interface{}" ) }}(token)
+ }{{ end }})
+ {{- end }}
+ {{- if .IsOAuth2 }}
+ result[name] = {{.ReceiverName}}.BearerAuthenticator(name, {{ if not ( eq .Principal "interface{}" ) }}func(token string, scopes []string) (interface{}, error) {
+ return {{ end }}{{.ReceiverName}}.{{ pascalize .ID }}Auth{{ if not ( eq .Principal "interface{}" ) }}(token, scopes)
+ }{{ end }})
+ {{- end }}
+ {{end}}
+ }
+ }
+ return result
+ {{- else }}
+ return nil
+ {{- end }}
+}
+
+// Authorizer returns the registered authorizer
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) Authorizer() runtime.Authorizer {
+ {{- if .SecurityDefinitions }}
+ return {{.ReceiverName}}.APIAuthorizer
+ {{- else }}
+ return nil
+ {{- end }}
+}
+
+// ConsumersFor gets the consumers for the specified media types.
+// MIME type parameters are ignored here.
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) ConsumersFor(mediaTypes []string) map[string]runtime.Consumer {
+ {{- if .Consumes }}
+ result := make(map[string]runtime.Consumer, len(mediaTypes))
+ for _, mt := range mediaTypes {
+ switch mt {
+ {{- range .Consumes }}
+ {{- range .AllSerializers }}
+ case "{{ .MediaType }}":
+ result["{{ .MediaType }}"] = {{.ReceiverName}}.{{ pascalize .Name }}Consumer
+ {{- end }}
+ {{- end }}
+ }
+
+ if c, ok := {{.ReceiverName}}.customConsumers[mt]; ok {
+ result[mt] = c
+ }
+ }
+ return result
+ {{- else }}
+ return nil
+ {{- end }}
+}
+
+// ProducersFor gets the producers for the specified media types.
+// MIME type parameters are ignored here.
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) ProducersFor(mediaTypes []string) map[string]runtime.Producer {
+ {{- if .Produces }}
+ result := make(map[string]runtime.Producer, len(mediaTypes))
+ for _, mt := range mediaTypes {
+ switch mt {
+ {{- range .Produces }}
+ {{- range .AllSerializers }}
+ case "{{ .MediaType }}":
+ result["{{ .MediaType }}"] = {{.ReceiverName}}.{{ pascalize .Name }}Producer
+ {{- end }}
+ {{- end }}
+ }
+
+ if p, ok := {{.ReceiverName}}.customProducers[mt]; ok {
+ result[mt] = p
+ }
+ }
+ return result
+ {{- else }}
+ return nil
+ {{- end }}
+}
+
+// HandlerFor gets a http.Handler for the provided operation method and path
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) HandlerFor(method, path string) (http.Handler, bool) {
+ if {{.ReceiverName}}.handlers == nil {
+ return nil, false
+ }
+ um := strings.ToUpper(method)
+ if _, ok := {{.ReceiverName}}.handlers[um]; !ok {
+ return nil, false
+ }
+ if path == "/" {
+ path = ""
+ }
+ h, ok := {{.ReceiverName}}.handlers[um][path]
+ return h, ok
+}
+
+// Context returns the middleware context for the {{ humanize .Name }} API
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) Context() *middleware.Context {
+ if {{.ReceiverName}}.context == nil {
+ {{.ReceiverName}}.context = middleware.NewRoutableContext({{.ReceiverName}}.spec, {{.ReceiverName}}, nil)
+ }
+
+ return {{ .ReceiverName }}.context
+}
+
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) initHandlerCache() {
+ {{.ReceiverName}}.Context() // don't care about the result, just that the initialization happened
+ {{- if .Operations }}
+ if {{ .ReceiverName }}.handlers == nil {
+ {{.ReceiverName}}.handlers = make(map[string]map[string]http.Handler)
+ }
+ {{ range .Operations }}
+ if {{ .ReceiverName }}.handlers[{{ printf "%q" (upper .Method) }}] == nil {
+ {{ .ReceiverName }}.handlers[{{ printf "%q" (upper .Method) }}] = make(map[string]http.Handler)
+ }
+ {{.ReceiverName}}.handlers[{{ printf "%q" (upper .Method) }}][{{ if eq .Path "/" }}""{{ else }}{{ printf "%q" (cleanPath .Path) }}{{ end }}] = {{ if ne .Package $package }}{{ .PackageAlias }}.{{ end }}New{{ pascalize .Name }}({{.ReceiverName}}.context, {{.ReceiverName}}.{{if ne .Package $package}}{{ pascalize .Package }}{{end}}{{ pascalize .Name }}Handler)
+ {{- end }}
+ {{- end }}
+}
+
+// Serve creates a http handler to serve the API over HTTP
+// can be used directly in http.ListenAndServe(":8000", api.Serve(nil))
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) Serve(builder middleware.Builder) http.Handler {
+ {{ .ReceiverName }}.Init()
+
+ if {{ .ReceiverName}}.Middleware != nil {
+ return {{ .ReceiverName }}.Middleware(builder)
+ }
+ if {{.ReceiverName}}.useSwaggerUI {
+ return {{.ReceiverName}}.context.APIHandlerSwaggerUI(builder)
+ }
+ return {{.ReceiverName}}.context.APIHandler(builder)
+}
+
+// Init allows you to just initialize the handler cache, you can then recompose the middleware as you see fit
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) Init() {
+ if len({{.ReceiverName}}.handlers) == 0 {
+ {{.ReceiverName}}.initHandlerCache()
+ }
+}
+
+// RegisterConsumer allows you to add (or override) a consumer for a media type.
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) RegisterConsumer(mediaType string, consumer runtime.Consumer) {
+ {{.ReceiverName}}.customConsumers[mediaType] = consumer
+}
+
+// RegisterProducer allows you to add (or override) a producer for a media type.
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) RegisterProducer(mediaType string, producer runtime.Producer) {
+ {{.ReceiverName}}.customProducers[mediaType] = producer
+}
+
+// AddMiddlewareFor adds a http middleware to existing handler
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) AddMiddlewareFor(method, path string, builder middleware.Builder) {
+ um := strings.ToUpper(method)
+ if path == "/" {
+ path = ""
+ }
+ {{.ReceiverName}}.Init()
+ if h, ok := {{.ReceiverName}}.handlers[um][path]; ok {
+ {{.ReceiverName}}.handlers[um][path] = builder(h)
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/configureapi.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/configureapi.gotmpl
new file mode 100644
index 000000000..cbbb0bfd1
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/configureapi.gotmpl
@@ -0,0 +1,167 @@
+// This file is safe to edit. Once it exists it will not be overwritten
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .APIPackage }}
+
+import (
+ "context"
+ "crypto/tls"
+ "io"
+ "log"
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/runtime/middleware"
+ "github.com/go-openapi/runtime/security"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+{{ with .GenOpts }}
+//go:generate swagger generate server --target {{ .TargetPath }} --name {{ .Name }} --spec {{ .SpecPath }}
+{{- if .APIPackage }}{{ if ne .APIPackage "operations" }} --api-package {{ .APIPackage }}{{ end }}{{ end }}
+{{- if .ModelPackage }}{{ if ne .ModelPackage "models" }} --model-package {{ .ModelPackage }}{{ end }}{{ end }}
+{{- if .ServerPackage }}{{ if ne .ServerPackage "restapi"}} --server-package {{ .ServerPackage }}{{ end }}{{ end }}
+{{- if .ClientPackage }}{{ if ne .ClientPackage "client" }} --client-package {{ .ClientPackage }}{{ end }}{{ end }}
+{{- if .TemplateDir }} --template-dir {{ .TemplateDir }}{{ end }}
+{{- range .Operations }} --operation {{ . }}{{ end }}
+{{- range .Tags }} --tags {{ . }}{{ end }}
+{{- if .Principal }} --principal {{ .Principal }}{{ end }}
+{{- if .DefaultScheme }}{{ if ne .DefaultScheme "http" }} --default-scheme {{ .DefaultScheme }}{{ end }}{{ end }}
+{{- range .Models }} --model {{ . }}{{ end }}
+{{- if or (not .IncludeModel) (not .IncludeValidator) }} --skip-models{{ end }}
+{{- if or (not .IncludeHandler) (not .IncludeParameters ) (not .IncludeResponses) }} --skip-operations{{ end }}
+{{- if not .IncludeSupport }} --skip-support{{ end }}
+{{- if not .IncludeMain }} --exclude-main{{ end }}
+{{- if .ExcludeSpec }} --exclude-spec{{ end }}
+{{- if .DumpData }} --dump-data{{ end }}
+{{- if .StrictResponders }} --strict-responders{{ end }}
+{{ end }}
+func configureFlags(api *{{.APIPackageAlias}}.{{ pascalize .Name }}API) {
+ // api.CommandLineOptionsGroups = []swag.CommandLineOptionsGroup{ ... }
+}
+
+func configureAPI(api *{{.APIPackageAlias}}.{{ pascalize .Name }}API) http.Handler {
+ // configure the api here
+ api.ServeError = errors.ServeError
+
+ // Set your custom logger if needed. Default one is log.Printf
+ // Expected interface func(string, ...interface{})
+ //
+ // Example:
+ // api.Logger = log.Printf
+
+ api.UseSwaggerUI()
+ // To continue using redoc as your UI, uncomment the following line
+ // api.UseRedoc()
+
+ {{ range .Consumes }}
+ {{- if .Implementation }}
+ api.{{ pascalize .Name }}Consumer = {{ .Implementation }}
+ {{- else }}
+ api.{{ pascalize .Name }}Consumer = runtime.ConsumerFunc(func(r io.Reader, target interface{}) error {
+ return errors.NotImplemented("{{.Name}} consumer has not yet been implemented")
+ })
+ {{- end }}
+ {{- end }}
+ {{ range .Produces }}
+ {{- if .Implementation }}
+ api.{{ pascalize .Name }}Producer = {{ .Implementation }}
+ {{- else }}
+ api.{{ pascalize .Name }}Producer = runtime.ProducerFunc(func(w io.Writer, data interface{}) error {
+ return errors.NotImplemented("{{.Name}} producer has not yet been implemented")
+ })
+ {{- end }}
+ {{- end}}
+ {{ range .SecurityDefinitions }}
+ {{- if .IsBasicAuth }}
+ // Applies when the Authorization header is set with the Basic scheme
+ if api.{{ pascalize .ID }}Auth == nil {
+ api.{{ pascalize .ID }}Auth = func(user string, pass string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return nil, errors.NotImplemented("basic auth ({{ .ID }}) has not yet been implemented")
+ }
+ }
+ {{- else if .IsAPIKeyAuth }}
+ // Applies when the "{{ .Name }}" {{ .Source }} is set
+ if api.{{ pascalize .ID }}Auth == nil {
+ api.{{ pascalize .ID }}Auth = func(token string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return nil, errors.NotImplemented("api key auth ({{ .ID }}) {{.Name}} from {{.Source}} param [{{ .Name }}] has not yet been implemented")
+ }
+ }
+ {{- else if .IsOAuth2 }}
+ if api.{{ pascalize .ID }}Auth == nil {
+ api.{{ pascalize .ID }}Auth = func(token string, scopes []string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return nil, errors.NotImplemented("oauth2 bearer auth ({{ .ID }}) has not yet been implemented")
+ }
+ }
+ {{- end }}
+ {{- end }}
+ {{- if .SecurityDefinitions }}
+
+ // Set your custom authorizer if needed. Default one is security.Authorized()
+ // Expected interface runtime.Authorizer
+ //
+ // Example:
+ // api.APIAuthorizer = security.Authorized()
+ {{- end }}
+ {{- $package := .Package }}
+ {{- $apipackagealias := .APIPackageAlias }}
+ {{- range .Operations }}
+ {{- if .HasFormParams }}
+ // You may change here the memory limit for this multipart form parser. Below is the default (32 MB).
+ // {{ if ne .Package $package }}{{ .PackageAlias }}{{ else }}{{ $apipackagealias }}{{ end }}.{{ pascalize .Name }}MaxParseMemory = 32 << 20
+ {{- end }}
+ {{- end }}
+ {{ range .Operations }}
+ if api.{{ if ne .Package $package }}{{ pascalize .Package }}{{ end }}{{ pascalize .Name }}Handler == nil {
+ api.{{ if ne .Package $package }}{{pascalize .Package}}{{ end }}{{ pascalize .Name }}Handler =
+ {{- if ne .Package $package }}
+ {{- .PackageAlias }}.{{- pascalize .Name }}HandlerFunc(func(params {{ .PackageAlias }}.{{- pascalize .Name }}Params
+ {{- else }}
+ {{- $apipackagealias }}.{{- pascalize .Name }}HandlerFunc(func(params {{ $apipackagealias }}.{{- pascalize .Name }}Params
+ {{- end }}
+ {{- if $.GenOpts.StrictResponders }}
+ {{- if .Authorized}}, principal {{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}{{end}}) {{.Package}}.{{ pascalize .Name }}Responder {
+ return {{.Package}}.{{ pascalize .Name }}NotImplemented()
+ {{ else }}
+ {{- if .Authorized}}, principal {{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}{{end}}) middleware.Responder {
+ return middleware.NotImplemented("operation {{ .Package}}.{{pascalize .Name}} has not yet been implemented")
+ {{ end -}}
+ })
+ }
+ {{- end }}
+
+ api.PreServerShutdown = func() { }
+
+ api.ServerShutdown = func() { }
+
+ return setupGlobalMiddleware(api.Serve(setupMiddlewares))
+}
+
+// The TLS configuration before HTTPS server starts.
+func configureTLS(tlsConfig *tls.Config) {
+ // Make all necessary changes to the TLS configuration here.
+}
+
+// As soon as server is initialized but not run yet, this function will be called.
+// If you need to modify a config, store server instance to stop it individually later, this is the place.
+// This function can be called multiple times, depending on the number of serving schemes.
+// scheme value will be set accordingly: "http", "https" or "unix".
+func configureServer(s *http.Server, scheme, addr string) {
+}
+
+// The middleware configuration is for the handler executors. These do not apply to the swagger.json document.
+// The middleware executes after routing but before authentication, binding and validation.
+func setupMiddlewares(handler http.Handler) http.Handler {
+ return handler
+}
+
+// The middleware configuration happens before anything, this middleware also applies to serving the swagger.json document.
+// So this is a good place to plug in a panic handling middleware, logging and metrics.
+func setupGlobalMiddleware(handler http.Handler) http.Handler {
+ return handler
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/doc.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/doc.gotmpl
new file mode 100644
index 000000000..b51734aa4
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/doc.gotmpl
@@ -0,0 +1,63 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{- if .Copyright }}
+// {{ comment .Copyright }}
+{{- end }}
+
+
+// Package {{ .APIPackage }} {{ if .Info.Title }}{{ comment .Info.Title }}{{ else }}{{ comment (humanize .Name) }}{{end}}
+//
+{{- if .Info.Description }}
+// {{ comment .Info.Description " " }}
+{{- end }}
+{{- if .Info.TermsOfService }}
+// Terms Of Service:
+// {{ comment .Info.TermsOfService " " }}
+{{- end }}
+{{- if or .Schemes .Host .BasePath .Info }}
+ {{- if .Schemes }}
+// Schemes:
+ {{- range .Schemes }}
+// {{ . }}
+ {{- end }}
+ {{- end }}
+ {{- if .Host }}
+// Host: {{ .Host }}
+ {{- end }}
+ {{- if .BasePath }}
+// BasePath: {{ .BasePath }}
+ {{- end}}
+ {{- with .Info }}
+ {{- if .Version }}
+// Version: {{ .Version }}
+ {{- end }}
+ {{- if .License }}
+// License: {{ if .License.Name }}{{ .License.Name}} {{ end }}{{ if .License.URL }}{{ .License.URL }}{{ end }}
+ {{- end }}
+ {{- if .Contact }}
+// Contact: {{ if .Contact.Name }}{{ .Contact.Name }}{{ end }}{{ if .Contact.Email }}<{{ .Contact.Email }}>{{ end }}{{ if .Contact.URL }} {{ .Contact.URL }}{{ end }}
+ {{- end }}
+ {{- end }}
+{{- end }}
+{{- if .Consumes }}
+//
+// Consumes:
+ {{- range .Consumes }}
+ {{- range .AllSerializers }}
+// - {{ .MediaType -}}
+ {{- end }}
+ {{- end }}
+{{- end }}
+{{- if .Produces }}
+//
+// Produces:
+ {{- range .Produces }}
+ {{- range .AllSerializers }}
+// - {{ .MediaType -}}
+ {{- end }}
+ {{- end }}
+{{- end }}
+//
+// swagger:meta
+package {{ .APIPackage }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/main.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/main.gotmpl
new file mode 100644
index 000000000..a6447ede7
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/main.gotmpl
@@ -0,0 +1,186 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+ {{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package main
+
+import (
+ "fmt"
+ "log"
+ "net/http"
+ "os"
+
+ "github.com/go-openapi/loads"
+ {{- if .UseGoStructFlags }}
+ flags "github.com/jessevdk/go-flags"
+ {{- end }}
+ {{- if .UsePFlags }}
+ flag "github.com/spf13/pflag"
+ {{- end }}
+ {{- if .UseFlags }}
+ "flag"
+ {{- end }}
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// This file was generated by the swagger tool.
+// Make sure not to overwrite this file after you generated it because all your edits would be lost!
+{{ if .ExcludeSpec }}
+func init() {
+ loads.AddLoader(fmts.YAMLMatcher, fmts.YAMLDoc)
+}
+{{ end }}
+
+func main() {
+ {{ if .UsePFlags }}
+ {{- if not .ExcludeSpec }}
+ swaggerSpec, err := loads.Embedded({{ .ServerPackageAlias }}.SwaggerJSON, {{ .ServerPackageAlias }}.FlatSwaggerJSON)
+ if err != nil {
+ log.Fatalln(err)
+ }
+ {{- end }}
+
+ var server *{{ .ServerPackageAlias }}.Server // make sure init is called
+
+ flag.Usage = func() {
+ fmt.Fprint(os.Stderr, "Usage:\n")
+ fmt.Fprint(os.Stderr, " {{ dasherize .Name }}-server [OPTIONS]\n\n")
+
+ title := {{ if .Info }}{{ if .Info.Title }}{{ printf "%q" .Info.Title }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Title{{ end }}{{ end }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Title{{ end }}{{ end}}
+ fmt.Fprint(os.Stderr, title+"\n\n")
+ desc := {{ if .Info }}{{ if .Info.Description }}{{ printf "%q" .Info.Description }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Description{{ end }}{{ end }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Description{{ end }}{{ end}}
+ if desc != "" {
+ fmt.Fprintf(os.Stderr, desc+"\n\n")
+ }
+ fmt.Fprintln(os.Stderr, flag.CommandLine.FlagUsages())
+ }
+ // parse the CLI flags
+ flag.Parse()
+ {{- if .ExcludeSpec }}
+
+ server = {{ .ServerPackageAlias }}.NewServer(nil)
+ swaggerSpec, err := loads.Spec(string(server.Spec))
+ if err != nil {
+ log.Fatalln(err)
+ }
+ api := {{.APIPackageAlias}}.New{{ pascalize .Name }}API(swaggerSpec)
+ server.SetAPI(api)
+ {{- else }}
+
+ api := {{.APIPackageAlias}}.New{{ pascalize .Name }}API(swaggerSpec)
+ // get server with flag values filled out
+ server = {{ .ServerPackageAlias }}.NewServer(api)
+ {{- end }}
+ defer server.Shutdown()
+
+ server.ConfigureAPI()
+ if err := server.Serve(); err != nil {
+ log.Fatalln(err)
+ }
+ {{ end }}
+ {{ if .UseGoStructFlags}}
+ {{- if .ExcludeSpec }}
+ server := {{ .ServerPackageAlias }}.NewServer(nil)
+ {{- else }}
+ swaggerSpec, err := loads.Embedded({{ .ServerPackageAlias }}.SwaggerJSON, {{ .ServerPackageAlias }}.FlatSwaggerJSON)
+ if err != nil {
+ log.Fatalln(err)
+ }
+
+ api := {{.APIPackageAlias}}.New{{ pascalize .Name }}API(swaggerSpec)
+ server := {{ .ServerPackageAlias }}.NewServer(api)
+ defer server.Shutdown()
+ {{- end }}
+
+ parser := flags.NewParser(server, flags.Default)
+ parser.ShortDescription = {{ if .Info }}{{ if .Info.Title }}{{ printf "%q" .Info.Title }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Title{{ end }}{{ end }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Title{{ end }}{{ end}}
+ parser.LongDescription = {{ if .Info }}{{ if .Info.Description }}{{ printf "%q" .Info.Description }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Description{{ end }}{{ end }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Description{{ end }}{{ end}}
+
+ {{- if not .ExcludeSpec }}
+ server.ConfigureFlags()
+ for _, optsGroup := range api.CommandLineOptionsGroups {
+ _, err := parser.AddGroup(optsGroup.ShortDescription, optsGroup.LongDescription, optsGroup.Options)
+ if err != nil {
+ log.Fatalln(err)
+ }
+ }
+ {{- end }}
+
+ if _, err := parser.Parse(); err != nil {
+ code := 1
+ if fe, ok := err.(*flags.Error); ok {
+ if fe.Type == flags.ErrHelp {
+ code = 0
+ }
+ }
+ os.Exit(code)
+ }
+ {{- if .ExcludeSpec }}
+ swaggerSpec, err := loads.Spec(string(server.Spec))
+ if err != nil {
+ log.Fatalln(err)
+ }
+
+ api := {{.APIPackageAlias}}.New{{ pascalize .Name }}API(swaggerSpec)
+ server.SetAPI(api)
+ defer server.Shutdown()
+ {{- end }}
+
+ server.ConfigureAPI()
+
+ if err := server.Serve(); err != nil {
+ log.Fatalln(err)
+ }
+ {{ end }}
+ {{ if .UseFlags}}
+ {{- if not .ExcludeSpec }}
+
+ swaggerSpec, err := loads.Embedded({{ .ServerPackageAlias }}.SwaggerJSON, {{ .ServerPackageAlias }}.FlatSwaggerJSON)
+ if err != nil {
+ log.Fatalln(err)
+ }
+ {{- end }}
+ var server *{{ .ServerPackageAlias }}.Server // make sure init is called
+
+ flag.Usage = func() {
+ fmt.Fprint(os.Stderr, "Usage:\n")
+ fmt.Fprint(os.Stderr, " {{ dasherize .Name }}-server [OPTIONS]\n\n")
+
+ title := {{ if .Info }}{{ if .Info.Title }}{{ printf "%q" .Info.Title }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Title{{ end }}{{ end }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Title{{ end }}{{ end}}
+ fmt.Fprint(os.Stderr, title+"\n\n")
+ desc := {{ if .Info }}{{ if .Info.Description }}{{ printf "%q" .Info.Description }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Description{{ end }}{{ end }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Description{{ end }}{{ end}}
+ if desc != "" {
+ fmt.Fprintf(os.Stderr, desc+"\n\n")
+ }
+ flag.CommandLine.SetOutput(os.Stderr)
+ flag.PrintDefaults()
+ }
+ // parse the CLI flags
+ flag.Parse()
+
+ {{- if .ExcludeSpec }}
+
+ server = {{ .ServerPackageAlias }}.NewServer(nil)
+ swaggerSpec, err := loads.Spec(string(server.Spec))
+ if err != nil {
+ log.Fatalln(err)
+ }
+ api := {{.APIPackageAlias}}.New{{ pascalize .Name }}API(swaggerSpec)
+ server.SetAPI(api)
+ {{- else }}
+ api := {{.APIPackageAlias}}.New{{ pascalize .Name }}API(swaggerSpec)
+ // get server with flag values filled out
+ server = {{ .ServerPackageAlias }}.NewServer(api)
+ {{- end }}
+ defer server.Shutdown()
+
+ server.ConfigureAPI()
+ if err := server.Serve(); err != nil {
+ log.Fatalln(err)
+ }
+ {{ end }}
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/operation.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/operation.gotmpl
new file mode 100644
index 000000000..041c00e44
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/operation.gotmpl
@@ -0,0 +1,92 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the generate command
+
+import (
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime/middleware"
+ "github.com/go-openapi/runtime/security"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// {{ pascalize .Name }}HandlerFunc turns a function with the right signature into a {{ humanize .Name }} handler
+type {{ pascalize .Name }}HandlerFunc func({{ pascalize .Name }}Params{{ if .Authorized }}, {{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}{{ end }}) {{ if $.StrictResponders }} {{ pascalize .Name }}Responder {{else}} middleware.Responder {{end}}
+
+// Handle executing the request and returning a response
+func (fn {{ pascalize .Name }}HandlerFunc) Handle(params {{ pascalize .Name }}Params{{ if .Authorized }}, principal {{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}{{ end }}) {{ if $.StrictResponders }} {{ pascalize .Name }}Responder {{else}} middleware.Responder {{end}} {
+ return fn(params{{ if .Authorized }}, principal{{ end }})
+}
+
+// {{ pascalize .Name }}Handler interface that can handle valid {{ humanize .Name }} params
+type {{ pascalize .Name }}Handler interface {
+ Handle({{ pascalize .Name }}Params{{ if .Authorized }}, {{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}{{ end }}) {{ if $.StrictResponders }} {{ pascalize .Name }}Responder {{else}} middleware.Responder {{end}}
+}
+
+// New{{ pascalize .Name }} creates a new http.Handler for the {{ humanize .Name }} operation
+func New{{ pascalize .Name }}(ctx *middleware.Context, handler {{ pascalize .Name }}Handler) *{{ pascalize .Name }} {
+ return &{{ pascalize .Name }}{Context: ctx, Handler: handler}
+}
+
+/* {{ pascalize .Name }} swagger:route {{ .Method }} {{ .Path }}{{ range .Tags }} {{ . }}{{ end }} {{ camelize .Name }}
+
+{{ if .Summary }}{{ .Summary }}{{ if .Description }}
+
+{{ blockcomment .Description }}{{ end }}{{ else if .Description}}{{ blockcomment .Description }}{{ else }}{{ pascalize .Name }} {{ humanize .Name }} API{{ end }}
+
+*/
+type {{ pascalize .Name }} struct {
+ Context *middleware.Context
+ Handler {{ pascalize .Name }}Handler
+}
+
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
+ route, rCtx, _ := {{ .ReceiverName }}.Context.RouteInfo(r)
+ if rCtx != nil {
+ *r = *rCtx
+ }
+ var Params = New{{ pascalize .Name }}Params()
+
+ {{- if .Authorized }}
+ uprinc, aCtx, err := {{ .ReceiverName }}.Context.Authorize(r, route)
+ if err != nil {
+ {{ .ReceiverName }}.Context.Respond(rw, r, route.Produces, route, err)
+ return
+ }
+ if aCtx != nil {
+ *r = *aCtx
+ }
+ var principal {{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}
+ if uprinc != nil {
+		principal = {{ if eq .Principal "interface{}" }}uprinc{{ else }}uprinc.({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}) // this is really a {{ .Principal }}, I promise{{ end }}
+ }
+ {{ end }}
+ if err := {{ .ReceiverName }}.Context.BindValidRequest(r, route, &Params); err != nil { // bind params
+ {{ .ReceiverName }}.Context.Respond(rw, r, route.Produces, route, err)
+ return
+ }
+
+ res := {{ .ReceiverName }}.Handler.Handle(Params{{ if .Authorized }}, principal{{ end }}) // actually handle the request
+ {{ .ReceiverName }}.Context.Respond(rw, r, route.Produces, route, res)
+
+}
+
+{{ range .ExtraSchemas }}
+// {{ .Name }} {{ template "docstring" . }}
+//
+// swagger:model {{ .Name }}
+ {{- template "schema" . }}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/parameter.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/parameter.gotmpl
new file mode 100644
index 000000000..1000a9f95
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/parameter.gotmpl
@@ -0,0 +1,720 @@
+{{ define "bindprimitiveparam" }}{{/* an empty test definition to test template repo dependencies resolution - DO NOT CHANGE THIS */}}
+{{ end }}
+{{ define "bodyvalidator" }}
+ {{- if .HasModelBodyParams }}
+ // validate body object{{/* delegate validation to model object */}}
+ if err := body.Validate(route.Formats); err != nil {
+ res = append(res, err)
+ }
+
+ ctx := validate.WithOperationRequest(r.Context())
+ if err := body.ContextValidate(ctx, route.Formats); err != nil {
+ res = append(res, err)
+ }
+
+ if len(res) == 0 {
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ }
+ {{- else if and .HasSimpleBodyParams .HasModelBodyItems }}
+
+ {{- if or .Schema.HasSliceValidations .Schema.Items.HasValidations }}
+
+ // validate array of body objects
+ {{- end }}
+
+ {{- if .Schema.HasSliceValidations }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ {{ template "sliceparamvalidator" . }}
+ {{- end }}
+
+ {{- if and .Schema.Items.HasValidations (not (or .Schema.Items.IsInterface .Schema.Items.IsStream)) }}
+ for {{ .IndexVar }} := range body {
+ {{- if .Schema.Items.IsNullable }}
+ if body[{{ .IndexVar }}] == nil {
+ {{- if .Schema.Items.Required }}
+ res = append(res, errors.Required({{ .Child.Path }}, {{ printf "%q" .Child.Location }}, body[{{ .IndexVar }}]))
+ break
+ {{- else }}
+ continue
+ {{- end }}
+ }
+ {{- end }}
+ if err := body[{{ .IndexVar }}].Validate(route.Formats); err != nil {
+ res = append(res, err)
+ break
+ }
+ }
+
+ {{- if not .Schema.HasSliceValidations }}
+
+ if len(res) == 0 {
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ }
+ {{- end }}
+ {{- else }}
+ // no validation for items in this slice
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ {{- end }}
+
+ {{- else if and .HasSimpleBodyParams .HasModelBodyMap }}
+
+ {{- if and .Schema.HasValidations (not (or .Schema.AdditionalProperties.IsInterface .Schema.AdditionalProperties.IsStream)) }}
+ // validate map of body objects
+ for {{ .KeyVar }} := range body {
+ {{- if .Schema.AdditionalProperties.Required }}
+ if err := validate.Required({{ if .Child.Path }}{{ .Child.Path }}{{ else }}""{{ end }}, {{ printf "%q" .Child.Location }}, {{ if not .IsAnonymous }}{{ .Schema.GoType }}({{ end }}body[{{ .KeyVar }}]{{ if not .IsAnonymous }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- if and .Schema.AdditionalProperties.IsNullable (not .IsMapNullOverride) }}
+ if body[{{ .KeyVar }}] == nil {
+ {{- if .Schema.AdditionalProperties.Required }}
+ res = append(res, errors.Required({{ .Path }}, {{ printf "%q" .Location }}, body[{{ .KeyVar }}]))
+ break
+ {{- else }}
+ continue
+ {{- end }}
+ }
+ {{- end }}
+        if val, ok := body[{{ .KeyVar }}]; ok {
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if val != nil {
+ {{- end }}
+ if err := val.Validate(route.Formats); err != nil {
+ res = append(res, err)
+ break
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ }
+ }
+
+ if len(res) == 0 {
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ }
+ {{- else }}
+ // no validation for this map
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ {{- end }}
+ {{- else if .HasSimpleBodyParams }}
+ {{- if and (not .IsArray) (not .IsMap) .Schema.HasValidations }}
+ // validate inline body
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ if err := {{ .ReceiverName }}.validate{{ pascalize .ID }}Body(route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if and (or .IsArray .IsMap) .Schema.HasValidations }}
+ // validate inline body {{ if .IsArray }}array{{ else }}map{{ end }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ if err := {{ .ReceiverName }}.validate{{ pascalize .ID }}Body(route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else }}
+ // no validation required on inline body
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ {{- end}}
+ {{- else }}
+ {{- if .IsInterface }}
+ // no validation on generic interface
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ {{- end }}
+ {{- end }}
+{{- end }}
+
+{{ define "sliceparamvalidator"}}
+ {{- if or .MinItems .MaxItems }}
+
+ {{ camelize .Name }}Size := int64(len({{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}*{{ end }}{{ if and .Child (not (hasPrefix .ValueExpression "o.")) }}{{ .Child.ValueExpression }}C{{ else }}{{ .ValueExpression }}{{ end }}))
+ {{- end }}
+ {{- if .MinItems }}
+
+// {{ .ItemsDepth }}minItems: {{ .MinItems }}
+if err := validate.MinItems({{ .Path }}, {{ printf "%q" .Location }}, {{ camelize .Name }}Size, {{ .MinItems }}); err != nil {
+ return err
+}
+ {{- end }}
+ {{- if .MaxItems }}
+
+// {{ .ItemsDepth }}maxItems: {{ .MaxItems }}
+if err := validate.MaxItems({{ .Path }}, {{ printf "%q" .Location }}, {{ camelize .Name }}Size, {{.MaxItems}}); err != nil {
+ return err
+}
+ {{- end }}
+ {{- if .UniqueItems }}
+
+// {{ .ItemsDepth }}uniqueItems: true
+if err := validate.UniqueItems({{ .Path }}, {{ printf "%q" .Location }}, {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}*{{ end }}{{ if and .Child (not ( hasPrefix .ValueExpression "o." )) }}{{ .Child.ValueExpression }}C{{ else }}{{ .ValueExpression }}{{ end }}); err != nil {
+ return err
+}
+ {{- end }}
+ {{- if .Enum }}
+
+// {{ .ItemsDepth }}Enum: {{ .Enum }}
+if err := validate.EnumCase(
+ {{- .Path }}, {{ printf "%q" .Location }},
+ {{- if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}*{{ end -}}
+ {{- if .Child -}}
+ {{- if not ( hasPrefix .ValueExpression "o." ) -}}
+ {{- .Child.ValueExpression }}C{{- if .IsCustomFormatter }}.String(){{ end -}}
+ {{- else -}}
+ {{- .ValueExpression -}}{{- if .Child.IsCustomFormatter }}.String(){{ end -}}
+ {{- end -}}
+ {{- end -}},
+ {{- printf "%#v" .Enum -}}, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+{{- end }}
+
+{{- define "childvalidator" }}
+ {{- if .Converter }}
+ {{- if ne .SwaggerFormat "" }}
+ // {{ .ItemsDepth }}Format: {{ printf "%q" .SwaggerFormat }}
+ {{- end }}
+ {{ varname .ValueExpression }}, err := {{ .Converter }}({{ varname .ValueExpression }}V)
+ if err != nil {
+ return errors.InvalidType({{ .Path }}, {{ printf "%q" .Location }}, "{{ .GoType }}", {{ varname .ValueExpression }})
+ }
+ {{- else if and .IsCustomFormatter (not .SkipParse) }}{{/* parsing is skipped for simple body items */}}
+ // {{ .ItemsDepth }}Format: {{ printf "%q" .SwaggerFormat }}
+ value, err := formats.Parse({{ printf "%q" .SwaggerFormat }},{{ varname .ValueExpression }}V)
+ if err != nil {
+ return errors.InvalidType({{ .Path }}, {{ printf "%q" .Location }}, "{{ .GoType }}", value)
+ }
+ {{ varname .ValueExpression }} := *(value.(*{{.GoType}}))
+ {{- else if and .IsComplexObject .HasValidations }}{{/* dedicated to nested body params */}}
+ {{ varname .ValueExpression }} := {{ varname .ValueExpression }}V
+ if err := {{ .ValueExpression }}.Validate(formats) ; err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ .Path }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ .Path }})
+ }
+ return err
+ }
+ {{- else }}
+ {{ varname .ValueExpression }} := {{ varname .ValueExpression }}V
+ {{- end }}
+ {{ template "propertyparamvalidator" . }}
+{{- end }}
+
+{{- define "mapparamvalidator" }}
+ {{- if and .Child.HasValidations (not (or .Child.IsInterface .Child.IsStream)) }}
+
+ // validations for map
+ {{- else }}
+
+ // map has no validations: copying all elements
+ {{- end }}
+ {{ varname .Child.ValueExpression }}R := make({{ .GoType }},len({{ .Child.ValueExpression }}C))
+ for {{ .KeyVar }}, {{ .Child.ValueExpression }}V := range {{ .Child.ValueExpression}}C {
+ {{- if .Child.IsArray }}
+ {{ .Child.Child.ValueExpression }}C := {{ varname .Child.ValueExpression }}V
+ {{- if .Child.HasSliceValidations }}
+ {{- template "sliceparamvalidator" .Child }}
+ {{- end }}
+ {{- template "sliceparambinder" .Child }}
+ {{- else if .Child.IsMap }}
+ {{ .Child.Child.ValueExpression }}C := {{ varname .Child.ValueExpression }}V
+ {{ template "mapparamvalidator" .Child }}
+ {{- else }}
+ {{- if and .Child.IsNullable }}
+ if {{ varname .Child.ValueExpression }}V == nil {
+ {{- if .Child.Required }}
+ return errors.Required({{ .Child.Path }}, {{ printf "%q" .Child.Location }}, {{ varname .Child.ValueExpression }}V)
+ {{- else }}
+ continue
+ {{- end }}
+ }
+ {{- end }}
+ {{- template "childvalidator" .Child }}
+ {{- end }}
+ {{ varname .Child.ValueExpression }}R[{{.KeyVar}}] = {{ varname .Child.ValueExpression }}{{ if or .Child.IsArray .Child.IsMap}}IR{{end}}
+ }
+{{- end }}
+
+{{- define "propertyparamvalidator" }}
+ {{- if .IsPrimitive }}
+ {{ template "validationPrimitive" . }}
+ {{- end }}
+ {{- if and .IsCustomFormatter (not .IsStream) (not .IsBase64) }}
+
+if err := validate.FormatOf({{.Path}}, "{{.Location}}", "{{.SwaggerFormat}}", {{ .ValueExpression}}.String(), formats); err != nil {
+ return err
+}
+ {{- end }}
+ {{- if .IsArray }}{{/* slice validations */}}
+ {{ template "sliceparamvalidator" . }}
+ {{- else if .IsMap }}
+ {{ .Child.ValueExpression }}C := {{ varname .Child.ValueExpression }}V
+ {{ template "mapparamvalidator" . }}
+ {{- end }}
+{{- end }}
+
+{{ define "sliceparambinder" }}
+var {{ varname .Child.ValueExpression }}R {{ .GoType }}
+for {{ if .Child.NeedsIndex }}{{ .IndexVar }}{{ else }}_{{ end }}, {{ varname .Child.ValueExpression }}V := range {{ varname .Child.ValueExpression }}C {
+ {{- if .Child.IsArray }}{{/* recursive resolution of arrays in params */}}
+ {{- if not .Child.SkipParse }}
+ // {{ .Child.ItemsDepth }}CollectionFormat: {{ .Child.CollectionFormat }}
+ {{- end }}
+ {{ .Child.Child.ValueExpression }}C := {{ if .Child.SkipParse }}{{ varname .Child.ValueExpression }}V{{ else }}swag.SplitByFormat({{ varname .Child.ValueExpression }}V, {{ printf "%q" .Child.CollectionFormat }}){{ end }}
+ {{- if .Child.HasSliceValidations }}
+ {{- template "sliceparamvalidator" .Child }}
+ {{- end }}
+ if len({{ varname .Child.Child.ValueExpression }}C) > 0 {
+ {{ template "sliceparambinder" .Child }}
+ {{ varname .Child.ValueExpression }}R = append({{ varname .Child.ValueExpression }}R, {{ varname .Child.ValueExpression }}{{ if or .Child.IsArray .Child.IsMap }}IR{{end}})
+ }
+ {{- else if .Child.IsMap }}{{/* simple map in items (possible with body params)*/}}
+ {{ .Child.Child.ValueExpression }}C := {{ varname .Child.ValueExpression }}V
+ {{- template "mapparamvalidator" .Child }}
+ {{ varname .Child.ValueExpression }}R = append({{ varname .Child.ValueExpression }}R, {{ varname .Child.ValueExpression }}{{ if or .Child.IsArray .Child.IsMap }}IR{{end}})
+ {{- else }}{{/* non-array && non-map type in items */}}
+ {{- if and .Child.IsNullable (not .IsMapNullOverride) }}
+ if {{ varname .Child.ValueExpression }}V == nil {
+ {{- if .Child.Required }}
+ return errors.Required({{ .Child.Path }}, {{ printf "%q" .Child.Location }}, {{ varname .Child.ValueExpression }}V)
+ {{- else }}
+ continue
+ {{- end }}
+ }
+ {{- end }}
+ {{- template "childvalidator" .Child }}
+ {{ varname .Child.ValueExpression }}R = append({{ varname .Child.ValueExpression }}R, {{ varname .Child.ValueExpression }}{{ if or .Child.IsArray .Child.IsMap }}IR{{end}})
+ {{- end }}
+}
+{{ end }}
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ "fmt"
+ "io"
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+	"github.com/go-openapi/runtime/middleware"
+	"github.com/go-openapi/runtime/security"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+{{- if .HasFormParams }}
+
+// {{ pascalize .Name }}MaxParseMemory sets the maximum size in bytes for
+// the multipart form parser for this operation.
+//
+// The default value is 32 MB.
+// The multipart parser stores up to this + 10MB.
+var {{ pascalize .Name }}MaxParseMemory int64 = 32 << 20
+{{- end }}
+
+// New{{ pascalize .Name }}Params creates a new {{ pascalize .Name }}Params object
+{{- if .Params.HasSomeDefaults }}
+// with the default values initialized.
+{{- else }}
+//
+// There are no default values defined in the spec.
+{{- end }}
+func New{{ pascalize .Name }}Params() {{ pascalize .Name }}Params {
+{{ if .Params.HasSomeDefaults }}
+ var (
+ // initialize parameters with default values
+ {{ range .Params }}
+ {{ if .HasDefault -}}
+ {{ if not .IsFileParam }}{{ varname .ID}}Default =
+ {{- if and .IsPrimitive .IsCustomFormatter (not (stringContains .Zero "(\"" )) }}{{ .Zero }}{{/* strfmt type initializer requires UnmarshalText(), e.g. Date, Datetime, Duration */}}
+ {{- else if and .IsPrimitive .IsCustomFormatter (stringContains .Zero "(\"" ) }}{{.GoType}}({{- printf "%#v" .Default }}){{/* strfmt type initializer takes string */}}
+ {{- else if and .IsPrimitive (not .IsCustomFormatter) -}}{{.GoType}}({{- printf "%#v" .Default }}){{/* regular go primitive type initializer */}}
+ {{- else if .IsArray -}}{{- /* Do not initialize from possible defaults in nested arrays */ -}}
+ {{- if and .Child.IsPrimitive .Child.IsCustomFormatter }}{{ .Zero }}{{/* initialization strategy with UnmarshalText() */}}
+ {{- else if .Child.IsArray -}}{{ .Zero }}{{/* initialization strategy with json.Unmarshal() */}}
+ {{- else if and .Child.IsPrimitive (not .Child.IsCustomFormatter) -}}{{.GoType}}{{- arrayInitializer .Default }}{{/* regular go primitive type initializer: simple slice initializer */}}
+ {{- else }}{{ printf "%#v" .Default }}{{/* all other cases (e.g. schema) [should not occur] */}}
+ {{- end }}
+ {{- else }}{{ printf "%#v" .Default }}{{/* case .Schema */}}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ )
+
+{{ range .Params }}{{ if .HasDefault -}}{{- /* carry out UnmarshalText initialization strategy */ -}}
+ {{ if and .IsPrimitive .IsCustomFormatter (not (stringContains .Zero "(\"")) }}{{ varname .ID}}Default.UnmarshalText([]byte({{ printf "%q" .Default }}))
+ {{ else if .IsArray -}}
+ {{ if or ( and .Child.IsPrimitive .Child.IsCustomFormatter ) .Child.IsArray -}}
+ if err := json.Unmarshal([]byte(`{{printf "%s" (json .Default)}}`), &{{ varname .ID }}Default); err != nil {
+ // panics if specification is invalid
+ msg := fmt.Sprintf("invalid default value for parameter {{ varname .ID }}: %v",err)
+ panic(msg)
+ }
+ {{ end -}}
+ {{- end }}
+ {{ end -}}
+{{- end }}
+{{ end }}
+ return {{ pascalize .Name }}Params{ {{ range .Params }}{{ if .HasDefault }}
+ {{ pascalize .ID}}: {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}&{{ end }}{{ varname .ID }}Default,
+ {{ end }}{{ end }} }
+}
+
+// {{ pascalize .Name }}Params contains all the bound params for the {{ humanize .Name }} operation
+// typically these are obtained from a http.Request
+//
+// swagger:parameters {{ .Name }}
+type {{ pascalize .Name }}Params struct {
+
+ // HTTP Request Object
+ HTTPRequest *http.Request `json:"-"`
+
+ {{ range .Params }}/*{{ if .Description }}{{ blockcomment .Description }}{{ end }}{{ if .Required }}
+ Required: true{{ end }}{{ if .Maximum }}
+ Maximum: {{ if .ExclusiveMaximum }}< {{ end }}{{ .Maximum }}{{ end }}{{ if .Minimum }}
+ Minimum: {{ if .ExclusiveMinimum }}> {{ end }}{{ .Minimum }}{{ end }}{{ if .MultipleOf }}
+ Multiple Of: {{ .MultipleOf }}{{ end }}{{ if .MaxLength }}
+ Max Length: {{ .MaxLength }}{{ end }}{{ if .MinLength }}
+ Min Length: {{ .MinLength }}{{ end }}{{ if .Pattern }}
+ Pattern: {{ .Pattern }}{{ end }}{{ if .MaxItems }}
+ Max Items: {{ .MaxItems }}{{ end }}{{ if .MinItems }}
+ Min Items: {{ .MinItems }}{{ end }}{{ if .UniqueItems }}
+ Unique: true{{ end }}{{ if .Location }}
+ In: {{ .Location }}{{ end }}{{ if .CollectionFormat }}
+ Collection Format: {{ .CollectionFormat }}{{ end }}{{ if .HasDefault }}
+ Default: {{ printf "%#v" .Default }}{{ end }}
+ */
+ {{ if not .Schema }}{{ pascalize .ID }} {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}*{{ end }}{{.GoType}}{{ else }}{{ pascalize .Name }} {{ if and (not .Schema.IsBaseType) .IsNullable (not .Schema.IsStream) }}*{{ end }}{{.GoType}}{{ end }}
+ {{ end}}
+}
+
+// BindRequest both binds and validates a request, it assumes that complex things implement a Validatable(strfmt.Registry) error interface
+// for simple values it will use straight method calls.
+//
+// To ensure default values, the struct must have been initialized with New{{ pascalize .Name }}Params() beforehand.
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) BindRequest(r *http.Request, route *middleware.MatchedRoute) error {
+ var res []error
+
+ {{ .ReceiverName }}.HTTPRequest = r
+
+{{- if .HasQueryParams }}
+
+ qs := runtime.Values(r.URL.Query())
+{{- end }}
+
+{{- if .HasFormParams }}
+
+ if err := r.ParseMultipartForm({{ pascalize .Name }}MaxParseMemory); err != nil {
+ if err != http.ErrNotMultipart {
+ return errors.New(400,"%v",err)
+ } else if err := r.ParseForm(); err != nil {
+ return errors.New(400,"%v",err)
+ }
+ }
+ {{- if .HasFormValueParams }}
+ fds := runtime.Values(r.Form)
+ {{- end }}
+{{- end }}
+{{ range .Params }}
+ {{- if not .IsArray }}
+ {{- if .IsQueryParam }}
+
+ q{{ pascalize .Name }}, qhk{{ pascalize .Name }}, _ := qs.GetOK({{ .Path }})
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(q{{ pascalize .Name }}, qhk{{ pascalize .Name }}, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if .IsPathParam }}
+
+ r{{ pascalize .Name }}, rhk{{ pascalize .Name }}, _ := route.Params.GetOK({{ .Path }})
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(r{{ pascalize .Name }}, rhk{{ pascalize .Name }}, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if .IsHeaderParam }}
+
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(r.Header[http.CanonicalHeaderKey({{ .Path }})], true, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if .IsFormParam }}
+ {{- if .IsFileParam }}
+
+ {{ camelize .Name }}, {{ camelize .Name }}Header, err := r.FormFile({{ .Path }})
+ if err != nil {{ if .IsNullable }}&& err != http.ErrMissingFile{{ end }}{
+ res = append(res, errors.New(400, "reading file %q failed: %v", {{ printf "%q" (camelize .Name) }}, err))
+ {{- if .IsNullable }}
+ } else if err == http.ErrMissingFile {
+ // no-op for missing but optional file parameter
+ {{- end }}
+ } else if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}({{ camelize .Name }}, {{ camelize .Name }}Header); err != nil {
+ {{- if .Required }}
+ // Required: true
+ {{- end }}
+ res = append(res, err)
+ } else {
+ {{ .ReceiverName }}.{{ pascalize .Name }} = &runtime.File{Data: {{ camelize .Name }}, Header: {{ camelize .Name }}Header}
+ }
+ {{- else }}
+
+ fd{{ pascalize .Name }}, fdhk{{ pascalize .Name }}, _ := fds.GetOK({{ .Path }})
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(fd{{ pascalize .Name }}, fdhk{{ pascalize .Name }}, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- end }}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{- if .IsQueryParam }}
+
+ q{{ pascalize .Name }}, qhk{{ pascalize .Name }}, _ := qs.GetOK({{ .Path }})
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(q{{ pascalize .Name }}, qhk{{ pascalize .Name }}, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if .IsPathParam }}
+
+ r{{ pascalize .Name }}, rhk{{ pascalize .Name }}, _ := route.Params.GetOK({{ .Path }})
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(r{{ pascalize .Name }}, rhk{{ pascalize .Name }}, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if .IsHeaderParam }}
+
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(r.Header[http.CanonicalHeaderKey({{ .Path }})], true, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if and .IsFormParam }}
+
+ fd{{ pascalize .Name }}, fdhk{{ pascalize .Name }}, _ := fds.GetOK({{ .Path }})
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(fd{{ pascalize .Name }}, fdhk{{ pascalize .Name }}, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- end }}
+ {{- end }}
+
+ {{- if and .IsBodyParam .Schema }}
+
+ if runtime.HasBody(r) {
+ {{- if .Schema.IsStream }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = r.Body
+ {{- else }}
+ defer r.Body.Close()
+ {{- if and .Schema.IsBaseType .Schema.IsExported }}
+ body, err := {{ toPackageName .ModelsPackage }}.Unmarshal{{ dropPackage .GoType }}{{ if .IsArray }}Slice{{ end }}(r.Body, route.Consumer)
+ if err != nil {
+ {{- if .Required }}
+ if err == io.EOF {
+ err = errors.Required({{ .Path }}, {{ printf "%q" .Location }}, "")
+ }
+ {{- end }}
+ res = append(res, err)
+ {{- else }}
+ var body {{ .GoType }}
+ if err := route.Consumer.Consume(r.Body, &body); err != nil {
+ {{- if .Required }}
+ if err == io.EOF {
+ res = append(res, errors.Required({{ printf "%q" (camelize .Name) }}, {{ printf "%q" .Location }}, ""))
+ } else {
+ {{- end }}
+ res = append(res, errors.NewParseError({{ printf "%q" (camelize .Name) }}, {{ printf "%q" .Location }}, "", err))
+ {{- if .Required }}
+ }
+ {{- end }}
+ {{- end }}
+ } else {
+ {{- template "bodyvalidator" . }}
+ }
+ {{- end }}
+ }
+ {{- if .Required }} else {
+ res = append(res, errors.Required({{ printf "%q" (camelize .Name) }}, {{ printf "%q" .Location }}, ""))
+ }
+ {{- end }}
+ {{- end }}
+{{- end }}
+ if len(res) > 0 {
+ return errors.CompositeValidationError(res...)
+ }
+ return nil
+}
+
+{{- $className := (pascalize .Name) }}
+{{ range .Params }}
+ {{- if .IsFileParam }}
+// bind{{ pascalize .ID }} binds file parameter {{ .ID }}.
+//
+// The only supported validations on files are MinLength and MaxLength
+func ({{ .ReceiverName }} *{{ $className }}Params) bind{{ pascalize .ID }}(file multipart.File, header *multipart.FileHeader) error {
+ {{- if or .MinLength .MaxLength }}
+ size, _ := file.Seek(0, io.SeekEnd)
+ file.Seek(0, io.SeekStart)
+ {{- end }}
+ {{- if .MinLength}}
+ if size < {{.MinLength}} {
+ return errors.ExceedsMinimum({{ .Path }}, {{ printf "%q" .Location }}, {{ .MinLength }}, false, size)
+ }
+ {{- end }}
+ {{- if .MaxLength}}
+ if size > {{.MaxLength}} {
+ return errors.ExceedsMaximum({{ .Path }}, {{ printf "%q" .Location }}, {{ .MaxLength }}, false, size)
+ }
+ {{- end }}
+ return nil
+}
+ {{- else if not .IsBodyParam }}
+ {{- if or .IsPrimitive .IsCustomFormatter }}
+
+// bind{{ pascalize .ID }} binds and validates parameter {{ .ID }} from {{ .Location }}.
+func ({{ .ReceiverName }} *{{ $className }}Params) bind{{ pascalize .ID }}(rawData []string, hasKey bool, formats strfmt.Registry) error {
+ {{- if and (not .IsPathParam) .Required }}
+ if !hasKey {
+ return errors.Required({{ .Path }}, {{ printf "%q" .Location }}, rawData)
+ }
+ {{- end }}
+ var raw string
+ if len(rawData) > 0 {
+ raw = rawData[len(rawData)-1]
+ }
+
+ // Required: {{ .Required }}
+ {{- if .IsQueryParam }}
+ // AllowEmptyValue: {{ .AllowEmptyValue }}
+ {{- end }}
+ {{- if .IsPathParam }}
+ // Parameter is provided by construction from the route
+ {{- end }}
+
+ {{- if and (not .IsPathParam) .Required (not .AllowEmptyValue) }}
+
+ if err := validate.RequiredString({{ .Path }}, {{ printf "%q" .Location }}, raw); err != nil {
+ return err
+ }
+ {{- else if and ( not .IsPathParam ) (or (not .Required) .AllowEmptyValue) }}
+
+ if raw == "" { // empty values pass all other validations
+ {{- if .HasDefault }}
+ // Default values have been previously initialized by New{{ $className }}Params()
+ {{- end }}
+ return nil
+ }
+ {{- end }}
+
+ {{- if .Converter }}
+
+ value, err := {{ .Converter }}(raw)
+ if err != nil {
+ return errors.InvalidType({{ .Path }}, {{ printf "%q" .Location }}, {{ printf "%q" .GoType }}, raw)
+ }
+ {{ .ValueExpression }} = {{ if .IsNullable }}&{{ end }}value
+ {{- else if .IsCustomFormatter }}
+
+ // Format: {{ .SwaggerFormat }}
+ value, err := formats.Parse({{ printf "%q" .SwaggerFormat }}, raw)
+ if err != nil {
+ return errors.InvalidType({{ .Path }}, {{ printf "%q" .Location }}, {{ printf "%q" .GoType }}, raw)
+ }
+ {{ .ValueExpression }} = {{ if or .IsArray .HasDiscriminator .IsFileParam .IsStream (not .IsNullable) }}*{{ end }}(value.(*{{ .GoType }}))
+ {{- else}}
+ {{ .ValueExpression }} = {{ if .IsNullable }}&{{ end }}raw
+ {{- end }}
+
+ {{- if .HasValidations }}
+
+ if err := {{ .ReceiverName }}.validate{{ pascalize .ID }}(formats); err != nil {
+ return err
+ }
+ {{- end }}
+
+ return nil
+}
+ {{- else if .IsArray }}
+
+// bind{{ pascalize .ID }} binds and validates array parameter {{ .ID }} from {{ .Location }}.
+//
+// Arrays are parsed according to CollectionFormat: "{{ .CollectionFormat }}" (defaults to "csv" when empty).
+func ({{ .ReceiverName }} *{{ $className }}Params) bind{{ pascalize .ID }}(rawData []string, hasKey bool, formats strfmt.Registry) error {
+ {{- if .Required }}
+ if !hasKey {
+ return errors.Required({{ .Path }}, {{ printf "%q" .Location }}, rawData)
+ }
+ {{- end }}
+ {{- if eq .CollectionFormat "multi" }}
+ // CollectionFormat: {{ .CollectionFormat }}
+ {{ varname .Child.ValueExpression }}C := rawData
+ {{- else }}
+ var qv{{ pascalize .Name }} string
+ if len(rawData) > 0 {
+	 qv{{ pascalize .Name }} = rawData[len(rawData)-1]
+ }
+
+ // CollectionFormat: {{ .CollectionFormat }}
+ {{ varname .Child.ValueExpression }}C := swag.SplitByFormat(qv{{ pascalize .Name }}, {{ printf "%q" .CollectionFormat }})
+ {{- end }}
+ {{- if and .Required (not .AllowEmptyValue) }}
+ if len({{ varname .Child.ValueExpression }}C) == 0 {
+ return errors.Required({{ .Path }}, {{ printf "%q" .Location }}, {{ varname .Child.ValueExpression }}C)
+ }
+ {{- else }}
+ if len({{ varname .Child.ValueExpression }}C) == 0 {
+ {{- if .HasDefault }}
+ // Default values have been previously initialized by New{{ $className }}Params()
+ {{- end }}
+ return nil
+ } {{- end }}
+ {{ template "sliceparambinder" . }}
+ {{ .ValueExpression }} = {{ varname .Child.ValueExpression }}R
+ {{- if .HasSliceValidations }}
+ if err := {{ .ReceiverName }}.validate{{ pascalize .ID }}(formats); err != nil {
+ return err
+ }
+ {{- end }}
+
+ return nil
+}
+ {{- end }}
+
+ {{- if or (and (not .IsArray) .HasValidations) (and .IsArray .HasSliceValidations) }}
+
+// validate{{ pascalize .ID }} carries on validations for parameter {{ .ID }}
+func ({{ .ReceiverName }} *{{ $className }}Params) validate{{ pascalize .ID }}(formats strfmt.Registry) error {
+ {{ template "propertyparamvalidator" . }}
+ return nil
+}
+ {{- end }}
+
+ {{- else if .IsBodyParam }}{{/* validation method for inline body parameters with validations */}}
+ {{- if and .HasSimpleBodyParams (not .HasModelBodyItems) (not .HasModelBodyMap) }}
+ {{- if .Schema.HasValidations }}
+
+// validate{{ pascalize .ID }}Body validates an inline body parameter
+func ({{ .ReceiverName }} *{{ $className }}Params) validate{{ pascalize .ID }}Body(formats strfmt.Registry) error {
+ {{- if .IsArray }}
+ {{- if .HasSliceValidations }}
+ {{- template "sliceparamvalidator" . }}
+ {{- end }}
+ {{- if .Child.HasValidations }}
+ {{ varname .Child.ValueExpression }}C := {{ .ValueExpression }}
+ {{ template "sliceparambinder" . }}
+ {{ .ValueExpression }} = {{ varname .Child.ValueExpression }}R
+ {{- end }}
+ {{- else if .IsMap }}
+ {{ varname .Child.ValueExpression }}C := {{ .ValueExpression }}
+ {{ template "mapparamvalidator" . }}
+ {{ .ValueExpression }} = {{ varname .Child.ValueExpression }}R
+ {{- else }}
+ {{ template "propertyparamvalidator" . }}
+ {{- end }}
+ return nil
+}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/responses.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/responses.gotmpl
new file mode 100644
index 000000000..1d844a890
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/responses.gotmpl
@@ -0,0 +1,271 @@
+{{ define "serverheaderbuilder" }}
+{{ if not .IsArray }}{{ template "simpleserverheaderbuilder" . }}{{ else }}{{ template "sliceserverheaderbuilder" . }}{{ end }}
+{{- end }}
+{{ define "simpleserverheaderbuilder" }}
+{{ if .IsNullable -}}
+var {{ varname .ID }} string
+if {{ .ReceiverName }}.{{ pascalize .ID }} != nil {
+ {{ varname .ID }} = {{ if .Formatter }}{{ .Formatter }}(*{{ .ReceiverName }}.{{ pascalize .ID }}){{ else }}{{ if not .IsCustomFormatter }}*{{ end }}{{ .ReceiverName }}.{{ pascalize .ID }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+}
+{{ else }}{{ varname .ID }} := {{ if .Formatter }}{{ .Formatter }}({{ .ReceiverName }}.{{ pascalize .ID }}){{ else }}{{ .ReceiverName }}.{{ pascalize .ID }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+{{ end -}}
+if {{ varname .ID }} != "" {
+ rw.Header().Set({{ printf "%q" .Name }}, {{ varname .ID }})
+}
+{{ end }}
+{{ define "sliceitemserverheaderbuilder" }}
+{{ if .IsNullable -}}
+var {{ .ValueExpression }}S string
+if {{ .ValueExpression }} != nil {
+ {{ .ValueExpression }}S = {{ if .Formatter }}{{ .Formatter }}(*{{ .ValueExpression }}){{ else }}*{{ .ValueExpression }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+}
+{{ else -}}
+{{ .ValueExpression }}S := {{ if .Formatter }}{{ .Formatter }}({{ .ValueExpression }}){{ else }}{{ .ValueExpression }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+{{ end -}}
+if {{ .ValueExpression }}S != "" {
+ {{ .ValueExpression }}R = append({{ .ValueExpression }}R, {{ .ValueExpression }}S)
+}
+{{ end }}
+{{define "sliceserverheaderbuilder" }}
+var {{ varname .Child.ValueExpression }}R []string
+for _, {{ varname .Child.ValueExpression }} := range {{ .ValueExpression }} {
+ {{- if not .Child.IsArray }}{{ template "sliceitemserverheaderbuilder" .Child }}{{ else }}{{ template "sliceserverheaderbuilder" .Child }}{{ end -}}
+}
+{{ if not .Child.Parent -}}
+{{ varname .ID }} := swag.JoinByFormat({{ varname .Child.ValueExpression }}R, {{ printf "%q" .CollectionFormat }})
+if len({{ varname .ID }}) > 0 {
+ hv := {{ varname .ID }}[0]
+ if hv != "" {
+ rw.Header().Set({{ printf "%q" .Name }}, hv)
+ }
+}
+{{ else -}}
+{{ .ValueExpression }}S := swag.JoinByFormat({{ varname .Child.ValueExpression }}R, {{ printf "%q" .CollectionFormat }})
+if len({{ .ValueExpression }}S) > 0 {
+ {{ .ValueExpression }}Ss := {{ .ValueExpression }}S[0]
+ if {{ .ValueExpression }}Ss != "" {
+ {{ .ValueExpression }}R = append({{ .ValueExpression }}R, {{ .ValueExpression }}Ss)
+ }
+}
+{{ end -}}
+{{ end -}}
+{{ define "serverresponse" }}
+{{ if ne .Code -1 }}// {{pascalize .Name}}Code is the HTTP code returned for type {{ pascalize .Name}}
+const {{ pascalize .Name}}Code int = {{ .Code }}{{ end }}
+
+/*{{ if .Description }}{{ pascalize .Name }} {{ blockcomment .Description }}{{else}}{{ pascalize .Name }} {{ humanize .Name }}{{end}}
+
+swagger:response {{ camelize .Name }}
+*/
+type {{ pascalize .Name }} struct {
+ {{ if eq .Code -1 }}
+ _statusCode int
+ {{ end }}{{ range .Headers }}/*{{if .Description }}{{ blockcomment .Description }}{{ end }}
+ {{ if .Maximum }}
+ Maximum: {{ if .ExclusiveMaximum }}< {{ end }}{{ .Maximum }}{{ end }}{{ if .Minimum }}
+ Minimum: {{ if .ExclusiveMinimum }}> {{ end }}{{ .Minimum }}{{ end }}{{ if .MultipleOf }}
+ Multiple Of: {{ .MultipleOf }}{{ end }}{{ if .MaxLength }}
+ Max Length: {{ .MaxLength }}{{ end }}{{ if .MinLength }}
+ Min Length: {{ .MinLength }}{{ end }}{{ if .Pattern }}
+ Pattern: {{ .Pattern }}{{ end }}{{ if .MaxItems }}
+ Max Items: {{ .MaxItems }}{{ end }}{{ if .MinItems }}
+ Min Items: {{ .MinItems }}{{ end }}{{ if .UniqueItems }}
+ Unique: true{{ end }}{{ if .HasDefault }}
+ Default: {{ printf "%#v" .Default }}{{ end }}
+ */
+ {{ pascalize .Name }} {{ .GoType }} `json:"{{.Name}}{{ if not .Required }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end }}
+ {{ if .Schema }}{{ with .Schema }}
+ /*{{if .Description }}{{ blockcomment .Description }}{{ end }}{{ if .Maximum }}
+ Maximum: {{ if .ExclusiveMaximum }}< {{ end }}{{ .Maximum }}{{ end }}{{ if .Minimum }}
+ Minimum: {{ if .ExclusiveMinimum }}> {{ end }}{{ .Minimum }}{{ end }}{{ if .MultipleOf }}
+ Multiple Of: {{ .MultipleOf }}{{ end }}{{ if .MaxLength }}
+ Max Length: {{ .MaxLength }}{{ end }}{{ if .MinLength }}
+ Min Length: {{ .MinLength }}{{ end }}{{ if .Pattern }}
+ Pattern: {{ .Pattern }}{{ end }}{{ if .MaxItems }}
+ Max Items: {{ .MaxItems }}{{ end }}{{ if .MinItems }}
+ Min Items: {{ .MinItems }}{{ end }}{{ if .UniqueItems }}
+ Unique: true{{ end }}
+ In: Body
+ */{{ end }}
+ Payload {{ if and (not .Schema.IsBaseType) .Schema.IsComplexObject }}*{{ end }}{{ .Schema.GoType }} `json:"body,omitempty"`
+ {{ end }}
+}
+
+// New{{ pascalize .Name }} creates {{ pascalize .Name }} with default headers values
+func New{{ pascalize .Name }}({{ if eq .Code -1 }}code int{{ end }}) *{{ pascalize .Name }} { {{ if eq .Code -1 }}
+if code <= 0 {
+ code = 500
+ }
+{{ end }}
+{{ if .Headers.HasSomeDefaults }}
+ var (
+ // initialize headers with default values
+ {{ range .Headers }}
+ {{ if .HasDefault -}}
+ {{ varname .ID}}Default =
+ {{- if and .IsPrimitive .IsCustomFormatter (not (stringContains .Zero "(\"" )) }}{{ .Zero }}{{/* strfmt type initializer requires UnmarshalText(), e.g. Date, Datetime, Duration */}}
+ {{- else if and .IsPrimitive .IsCustomFormatter (stringContains .Zero "(\"" ) }}{{.GoType}}({{- printf "%#v" .Default }}){{/* strfmt type initializer takes string */}}
+ {{- else if and .IsPrimitive (not .IsCustomFormatter) -}}{{.GoType}}({{- printf "%#v" .Default }}){{/* regular go primitive type initializer */}}
+ {{- else if .IsArray -}}{{- /* Do not initialize from possible defaults in nested arrays */ -}}
+ {{- if and .Child.IsPrimitive .Child.IsCustomFormatter }}{{ .Zero }}{{/* initialization strategy with UnmarshalText() */}}
+ {{- else if .Child.IsArray -}}{{ .Zero }}{{/* initialization strategy with json.Unmarshal() */}}
+ {{- else if and .Child.IsPrimitive (not .Child.IsCustomFormatter) -}}{{.GoType}}{{- arrayInitializer .Default }}{{/* regular go primitive type initializer: simple slice initializer */}}
+ {{- else }}{{ printf "%#v" .Default }}{{/* all other cases (e.g. schema) [should not occur] */}}
+ {{- end }}
+ {{- else }}{{ printf "%#v" .Default }}{{/* case .Schema */}}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ )
+
+{{ range .Headers }}{{ if .HasDefault -}}{{- /* carry out UnmarshalText initialization strategy */ -}}
+ {{ if and .IsPrimitive .IsCustomFormatter (not (stringContains .Zero "(\"")) }}{{ varname .ID}}Default.UnmarshalText([]byte({{ printf "%q" .Default }}))
+ {{ else if .IsArray -}}
+ {{ if or ( and .Child.IsPrimitive .Child.IsCustomFormatter ) .Child.IsArray -}}
+ if err := json.Unmarshal([]byte(`{{printf "%s" (json .Default)}}`), &{{ varname .ID }}Default); err != nil {
+ // panics if specification is invalid
+ msg := fmt.Sprintf("invalid default value for header {{ varname .ID }}: %v",err)
+ panic(msg)
+ }
+ {{ end -}}
+ {{- end }}
+ {{- end }}
+{{- end }}
+{{ end }}
+ return &{{ pascalize .Name }}{
+ {{ if eq .Code -1 }}_statusCode: code,{{ end }}
+ {{ range .Headers }}{{ if .HasDefault }}
+ {{ pascalize .Name}}: {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}&{{ end }}{{ varname .ID }}Default,
+ {{ end }}
+ {{ end -}}
+ }
+}
+
+{{ if eq .Code -1 }}
+// WithStatusCode adds the status to the {{ humanize .Name }} response
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) WithStatusCode(code int) *{{ pascalize .Name }} {
+ {{ .ReceiverName }}._statusCode = code
+ return {{ .ReceiverName }}
+}
+
+// SetStatusCode sets the status to the {{ humanize .Name }} response
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) SetStatusCode(code int) {
+ {{ .ReceiverName }}._statusCode = code
+}
+{{ end }}{{ range .Headers }}
+// With{{ pascalize .Name }} adds the {{ camelize .Name }} to the {{ humanize $.Name }} response
+func ({{ $.ReceiverName }} *{{ pascalize $.Name }}) With{{ pascalize .Name }}({{ varname .Name }} {{ .GoType}}) *{{ pascalize $.Name }} {
+ {{ $.ReceiverName }}.{{ pascalize .Name }} = {{ varname .Name }}
+	return {{ $.ReceiverName }}
+}
+
+// Set{{ pascalize .Name }} sets the {{ camelize .Name }} to the {{ humanize $.Name }} response
+func ({{ $.ReceiverName }} *{{ pascalize $.Name }}) Set{{ pascalize .Name }}({{ varname .Name }} {{ .GoType}}) {
+ {{ $.ReceiverName }}.{{ pascalize .Name }} = {{ varname .Name }}
+}
+{{ end }}{{ if .Schema }}
+// WithPayload adds the payload to the {{ humanize .Name }} response
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) WithPayload(payload {{ if and .Schema.IsComplexObject (not .Schema.IsBaseType) }}*{{ end }}{{ .Schema.GoType }}) *{{ pascalize .Name }} {
+ {{ .ReceiverName }}.Payload = payload
+ return {{ .ReceiverName }}
+}
+
+// SetPayload sets the payload to the {{ humanize .Name }} response
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) SetPayload(payload {{ if and .Schema.IsComplexObject (not .Schema.IsBaseType) }}*{{ end }}{{ .Schema.GoType }}) {
+ {{ .ReceiverName }}.Payload = payload
+}
+{{ end }}
+
+// WriteResponse to the client
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {
+ {{ range .Headers }}
+ // response header {{.Name}}
+ {{ template "serverheaderbuilder" . -}}
+ {{ end }}
+ {{ if not .Schema }}
+  rw.Header().Del(runtime.HeaderContentType) // Remove Content-Type on empty responses
+ {{ end }}
+ rw.WriteHeader({{ if eq .Code -1 }}{{ .ReceiverName }}._statusCode{{ else }}{{ .Code }}{{ end }})
+ {{- if .Schema }}
+ {{- if .Schema.IsComplexObject }}
+ if {{ .ReceiverName }}.Payload != nil {
+ {{- end }}
+ payload := {{ .ReceiverName }}.Payload
+ {{- if and (not .Schema.IsInterface) (or .Schema.IsArray .Schema.IsMap) }}
+ if payload == nil {
+ // return empty {{ if .Schema.IsArray }}array{{ else if .Schema.IsMap }}map{{ end }}
+ payload =
+ {{- if or .Schema.IsAliased .Schema.IsComplexObject }}
+ {{- if and (not .Schema.IsBaseType) .Schema.IsComplexObject }}&{{ end }}{{ .Schema.GoType -}} {}
+ {{- else }}
+ {{- .Schema.Zero }}
+ {{- end }}
+ }
+ {{ end }}
+ if err := producer.Produce(rw, payload); err != nil {
+ panic(err) // let the recovery middleware deal with this
+ }
+ {{- if .Schema.IsComplexObject }}
+ }
+ {{- end }}
+ {{- end }}
+}
+
+{{ if $.StrictResponders }}
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) {{ pascalize .OperationName }}Responder() {}
+{{- end }}
+{{ end }}// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+
+import (
+ "fmt"
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/runtime/security"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+ "github.com/go-openapi/runtime/middleware"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+{{ range .Responses }}
+{{ template "serverresponse" . }}
+{{ end }}
+{{ if .DefaultResponse }}
+{{ template "serverresponse" .DefaultResponse }}
+{{ end }}
+
+{{ if $.StrictResponders }}
+type {{ pascalize .Name }}NotImplementedResponder struct {
+ middleware.Responder
+}
+
+func (*{{ pascalize .Name }}NotImplementedResponder) {{ pascalize .Name }}Responder() {}
+
+func {{ pascalize .Name }}NotImplemented() {{ pascalize .Name }}Responder {
+ return &{{ pascalize .Name }}NotImplementedResponder{
+ middleware.NotImplemented(
+			"operation {{ .Package }}.{{ pascalize .Name }} has not yet been implemented",
+ ),
+ }
+}
+
+type {{ pascalize .Name }}Responder interface {
+ middleware.Responder
+ {{ pascalize .Name }}Responder()
+}
+{{ end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/server.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/server.gotmpl
new file mode 100644
index 000000000..c78d22051
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/server.gotmpl
@@ -0,0 +1,660 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .APIPackage }}
+
+import (
+ "context"
+ "crypto/tls"
+ "crypto/x509"
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "net"
+ "net/http"
+ "os"
+ "os/signal"
+ "strconv"
+ "sync"
+ "sync/atomic"
+ "syscall"
+ "time"
+
+ "github.com/go-openapi/swag"
+ {{ if .UseGoStructFlags }}flags "github.com/jessevdk/go-flags"
+ {{ end -}}
+ "github.com/go-openapi/runtime/flagext"
+ {{ if .UsePFlags }}flag "github.com/spf13/pflag"
+ {{ end -}}
+ {{ if .UseFlags }}"flag"
+ "strings"
+ {{ end -}}
+ "golang.org/x/net/netutil"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+const (
+ schemeHTTP = "http"
+ schemeHTTPS = "https"
+ schemeUnix = "unix"
+)
+
+var defaultSchemes []string
+
+func init() {
+ defaultSchemes = []string{ {{ if (hasInsecure .Schemes) }}
+ schemeHTTP,{{ end}}{{ if (hasSecure .Schemes) }}
+ schemeHTTPS,{{ end }}{{ if (contains .ExtraSchemes "unix") }}
+ schemeUnix,{{ end }}
+ }
+}
+
+{{ if not .UseGoStructFlags}}
+var ({{ if .ExcludeSpec }}
+ specFile string
+ {{ end }}enabledListeners []string
+ cleanupTimeout time.Duration
+ gracefulTimeout time.Duration
+ maxHeaderSize flagext.ByteSize
+
+ socketPath string
+
+ host string
+ port int
+ listenLimit int
+ keepAlive time.Duration
+ readTimeout time.Duration
+ writeTimeout time.Duration
+
+ tlsHost string
+ tlsPort int
+ tlsListenLimit int
+ tlsKeepAlive time.Duration
+ tlsReadTimeout time.Duration
+ tlsWriteTimeout time.Duration
+ tlsCertificate string
+ tlsCertificateKey string
+ tlsCACertificate string
+)
+
+{{ if .UseFlags}}
+// StringSliceVar support for flag
+type sliceValue []string
+
+func newSliceValue(vals []string, p *[]string) *sliceValue {
+ *p = vals
+ return (*sliceValue)(p)
+}
+
+func (s *sliceValue) Set(val string) error {
+ *s = sliceValue(strings.Split(val, ","))
+ return nil
+}
+
+func (s *sliceValue) Get() interface{} { return []string(*s) }
+
+func (s *sliceValue) String() string { return strings.Join([]string(*s), ",") }
+// end StringSliceVar support for flag
+{{ end }}
+
+func init() {
+ maxHeaderSize = flagext.ByteSize(1000000){{ if .ExcludeSpec }}
+ flag.StringVarP(&specFile, "spec", "", "", "the swagger specification to serve")
+ {{ end }}
+ {{ if .UseFlags }}
+ flag.Var(newSliceValue(defaultSchemes, &enabledListeners), "schema", "the listeners to enable, this can be repeated and defaults to the schemes in the swagger spec")
+ {{ end }}
+ {{ if .UsePFlags }}
+ flag.StringSliceVar(&enabledListeners, "scheme", defaultSchemes, "the listeners to enable, this can be repeated and defaults to the schemes in the swagger spec")
+ {{ end }}
+ flag.DurationVar(&cleanupTimeout, "cleanup-timeout", 10*time.Second, "grace period for which to wait before killing idle connections")
+ flag.DurationVar(&gracefulTimeout, "graceful-timeout", 15*time.Second, "grace period for which to wait before shutting down the server")
+ flag.Var(&maxHeaderSize, "max-header-size", "controls the maximum number of bytes the server will read parsing the request header's keys and values, including the request line. It does not limit the size of the request body")
+
+	flag.StringVar(&socketPath, "socket-path", "/var/run/{{ dasherize .Name }}.sock", "the unix socket to listen on")
+
+ flag.StringVar(&host, "host", "localhost", "the IP to listen on")
+ flag.IntVar(&port, "port", 0, "the port to listen on for insecure connections, defaults to a random value")
+ flag.IntVar(&listenLimit, "listen-limit", 0, "limit the number of outstanding requests")
+ flag.DurationVar(&keepAlive, "keep-alive", 3*time.Minute, "sets the TCP keep-alive timeouts on accepted connections. It prunes dead TCP connections ( e.g. closing laptop mid-download)")
+ flag.DurationVar(&readTimeout, "read-timeout", 30*time.Second, "maximum duration before timing out read of the request")
+ flag.DurationVar(&writeTimeout, "write-timeout", 30*time.Second, "maximum duration before timing out write of the response")
+
+ flag.StringVar(&tlsHost, "tls-host", "localhost", "the IP to listen on")
+ flag.IntVar(&tlsPort, "tls-port", 0, "the port to listen on for secure connections, defaults to a random value")
+ flag.StringVar(&tlsCertificate, "tls-certificate", "", "the certificate file to use for secure connections")
+ flag.StringVar(&tlsCertificateKey, "tls-key", "", "the private key file to use for secure connections (without passphrase)")
+ flag.StringVar(&tlsCACertificate, "tls-ca", "", "the certificate authority certificate file to be used with mutual tls auth")
+ flag.IntVar(&tlsListenLimit, "tls-listen-limit", 0, "limit the number of outstanding requests")
+ flag.DurationVar(&tlsKeepAlive, "tls-keep-alive", 3*time.Minute, "sets the TCP keep-alive timeouts on accepted connections. It prunes dead TCP connections ( e.g. closing laptop mid-download)")
+ flag.DurationVar(&tlsReadTimeout, "tls-read-timeout", 30*time.Second, "maximum duration before timing out read of the request")
+ flag.DurationVar(&tlsWriteTimeout, "tls-write-timeout", 30*time.Second, "maximum duration before timing out write of the response")
+}
+
+func stringEnvOverride(orig string, def string, keys ...string) string {
+ for _, k := range keys {
+ if os.Getenv(k) != "" {
+ return os.Getenv(k)
+ }
+ }
+ if def != "" && orig == "" {
+ return def
+ }
+ return orig
+}
+
+func intEnvOverride(orig int, def int, keys ...string) int {
+ for _, k := range keys {
+ if os.Getenv(k) != "" {
+ v, err := strconv.Atoi(os.Getenv(k))
+ if err != nil {
+ fmt.Fprintln(os.Stderr, k, "is not a valid number")
+ os.Exit(1)
+ }
+ return v
+ }
+ }
+ if def != 0 && orig == 0 {
+ return def
+ }
+ return orig
+}
+{{ end }}
+
+// NewServer creates a new api {{ humanize .Name }} server but does not configure it
+func NewServer(api *{{ .APIPackageAlias }}.{{ pascalize .Name }}API) *Server {
+ s := new(Server)
+ {{ if not .UseGoStructFlags }}
+ s.EnabledListeners = enabledListeners
+ s.CleanupTimeout = cleanupTimeout
+ s.GracefulTimeout = gracefulTimeout
+ s.MaxHeaderSize = maxHeaderSize
+ s.SocketPath = socketPath
+ s.Host = stringEnvOverride(host, "", "HOST")
+ s.Port = intEnvOverride(port, 0, "PORT")
+ s.ListenLimit = listenLimit
+ s.KeepAlive = keepAlive
+ s.ReadTimeout = readTimeout
+ s.WriteTimeout = writeTimeout
+ s.TLSHost = stringEnvOverride(tlsHost, s.Host, "TLS_HOST", "HOST")
+ s.TLSPort = intEnvOverride(tlsPort, 0, "TLS_PORT")
+ s.TLSCertificate = stringEnvOverride(tlsCertificate, "", "TLS_CERTIFICATE")
+ s.TLSCertificateKey = stringEnvOverride(tlsCertificateKey, "", "TLS_PRIVATE_KEY")
+ s.TLSCACertificate = stringEnvOverride(tlsCACertificate, "", "TLS_CA_CERTIFICATE")
+ s.TLSListenLimit = tlsListenLimit
+ s.TLSKeepAlive = tlsKeepAlive
+ s.TLSReadTimeout = tlsReadTimeout
+ s.TLSWriteTimeout = tlsWriteTimeout
+ {{- if .ExcludeSpec }}
+ s.Spec = specFile
+ {{- end }}
+ {{- end }}
+ s.shutdown = make(chan struct{})
+ s.api = api
+ s.interrupt = make(chan os.Signal, 1)
+ return s
+}
+
+// ConfigureAPI configures the API and handlers.
+func (s *Server) ConfigureAPI() {
+ if s.api != nil {
+ s.handler = configureAPI(s.api)
+ }
+}
+
+// ConfigureFlags configures the additional flags defined by the handlers. Needs to be called before the parser.Parse
+func (s *Server) ConfigureFlags() {
+ if s.api != nil {
+ configureFlags(s.api)
+ }
+}
+
+// Server for the {{ humanize .Name }} API
+type Server struct {
+ EnabledListeners []string{{ if .UseGoStructFlags }} `long:"scheme" description:"the listeners to enable, this can be repeated and defaults to the schemes in the swagger spec"`{{ end }}
+ CleanupTimeout time.Duration{{ if .UseGoStructFlags }} `long:"cleanup-timeout" description:"grace period for which to wait before killing idle connections" default:"10s"`{{ end }}
+ GracefulTimeout time.Duration{{ if .UseGoStructFlags }} `long:"graceful-timeout" description:"grace period for which to wait before shutting down the server" default:"15s"`{{ end }}
+ MaxHeaderSize flagext.ByteSize{{ if .UseGoStructFlags }} `long:"max-header-size" description:"controls the maximum number of bytes the server will read parsing the request header's keys and values, including the request line. It does not limit the size of the request body." default:"1MiB"`{{ end }}
+
+ SocketPath {{ if not .UseGoStructFlags }}string{{ else }}flags.Filename `long:"socket-path" description:"the unix socket to listen on" default:"/var/run/{{ dasherize .Name }}.sock"`{{ end }}
+ domainSocketL net.Listener
+
+ Host string{{ if .UseGoStructFlags }} `long:"host" description:"the IP to listen on" default:"localhost" env:"HOST"`{{ end }}
+ Port int{{ if .UseGoStructFlags }} `long:"port" description:"the port to listen on for insecure connections, defaults to a random value" env:"PORT"`{{ end }}
+ ListenLimit int{{ if .UseGoStructFlags }} `long:"listen-limit" description:"limit the number of outstanding requests"`{{ end }}
+ KeepAlive time.Duration{{ if .UseGoStructFlags }} `long:"keep-alive" description:"sets the TCP keep-alive timeouts on accepted connections. It prunes dead TCP connections ( e.g. closing laptop mid-download)" default:"3m"`{{ end }}
+ ReadTimeout time.Duration{{ if .UseGoStructFlags }} `long:"read-timeout" description:"maximum duration before timing out read of the request" default:"30s"`{{ end }}
+	WriteTimeout time.Duration{{ if .UseGoStructFlags }} `long:"write-timeout" description:"maximum duration before timing out write of the response" default:"30s"`{{ end }}
+ httpServerL net.Listener
+
+ TLSHost string{{ if .UseGoStructFlags }} `long:"tls-host" description:"the IP to listen on for tls, when not specified it's the same as --host" env:"TLS_HOST"`{{ end }}
+ TLSPort int{{ if .UseGoStructFlags }} `long:"tls-port" description:"the port to listen on for secure connections, defaults to a random value" env:"TLS_PORT"`{{ end }}
+ TLSCertificate {{ if not .UseGoStructFlags }}string{{ else }}flags.Filename `long:"tls-certificate" description:"the certificate to use for secure connections" env:"TLS_CERTIFICATE"`{{ end }}
+ TLSCertificateKey {{ if not .UseGoStructFlags }}string{{ else }}flags.Filename `long:"tls-key" description:"the private key to use for secure connections" env:"TLS_PRIVATE_KEY"`{{ end }}
+ TLSCACertificate {{ if not .UseGoStructFlags }}string{{ else }}flags.Filename `long:"tls-ca" description:"the certificate authority file to be used with mutual tls auth" env:"TLS_CA_CERTIFICATE"`{{ end }}
+ TLSListenLimit int{{ if .UseGoStructFlags }} `long:"tls-listen-limit" description:"limit the number of outstanding requests"`{{ end }}
+ TLSKeepAlive time.Duration{{ if .UseGoStructFlags }} `long:"tls-keep-alive" description:"sets the TCP keep-alive timeouts on accepted connections. It prunes dead TCP connections ( e.g. closing laptop mid-download)"`{{ end }}
+ TLSReadTimeout time.Duration{{ if .UseGoStructFlags }} `long:"tls-read-timeout" description:"maximum duration before timing out read of the request"`{{ end }}
+ TLSWriteTimeout time.Duration{{ if .UseGoStructFlags }} `long:"tls-write-timeout" description:"maximum duration before timing out write of the response"`{{ end }}
+ httpsServerL net.Listener
+
+ {{ if .ExcludeSpec }}Spec {{ if not .UseGoStructFlags }}string{{ else }}flags.Filename `long:"spec" description:"the swagger specification to serve"`{{ end }}{{ end }}
+ api *{{ .APIPackageAlias }}.{{ pascalize .Name }}API
+ handler http.Handler
+ hasListeners bool
+ shutdown chan struct{}
+ shuttingDown int32
+ interrupted bool
+ interrupt chan os.Signal
+}
+
+// Logf logs message either via defined user logger or via system one if no user logger is defined.
+func (s *Server) Logf(f string, args ...interface{}) {
+ if s.api != nil && s.api.Logger != nil {
+ s.api.Logger(f, args...)
+ } else {
+ log.Printf(f, args...)
+ }
+}
+
+// Fatalf logs message either via defined user logger or via system one if no user logger is defined.
+// Exits with non-zero status after printing
+func (s *Server) Fatalf(f string, args ...interface{}) {
+ if s.api != nil && s.api.Logger != nil {
+ s.api.Logger(f, args...)
+ os.Exit(1)
+ } else {
+ log.Fatalf(f, args...)
+ }
+}
+
+// SetAPI configures the server with the specified API. Needs to be called before Serve
+func (s *Server) SetAPI(api *{{ .APIPackageAlias }}.{{ pascalize .Name }}API) {
+ if api == nil {
+ s.api = nil
+ s.handler = nil
+ return
+ }
+
+ s.api = api
+ s.handler = configureAPI(api)
+}
+
+func (s *Server) hasScheme(scheme string) bool {
+ schemes := s.EnabledListeners
+ if len(schemes) == 0 {
+ schemes = defaultSchemes
+ }
+
+ for _, v := range schemes {
+ if v == scheme {
+ return true
+ }
+ }
+ return false
+}
+
+// Serve the api
+func (s *Server) Serve() (err error) {
+ if !s.hasListeners {
+ if err = s.Listen(); err != nil {
+ return err
+ }
+ }
+
+ // set default handler, if none is set
+ if s.handler == nil {
+ if s.api == nil {
+ return errors.New("can't create the default handler, as no api is set")
+ }
+
+ s.SetHandler(s.api.Serve(nil))
+ }
+
+ wg := new(sync.WaitGroup)
+ once := new(sync.Once)
+ signalNotify(s.interrupt)
+ go handleInterrupt(once, s)
+
+ servers := []*http.Server{}
+
+ if s.hasScheme(schemeUnix) {
+ domainSocket := new(http.Server)
+ domainSocket.MaxHeaderBytes = int(s.MaxHeaderSize)
+ domainSocket.Handler = s.handler
+ if int64(s.CleanupTimeout) > 0 {
+ domainSocket.IdleTimeout = s.CleanupTimeout
+ }
+
+ configureServer(domainSocket, "unix", string(s.SocketPath))
+
+ servers = append(servers, domainSocket)
+ wg.Add(1)
+ s.Logf("Serving {{ humanize .Name }} at unix://%s", s.SocketPath)
+ go func(l net.Listener){
+ defer wg.Done()
+ if err := domainSocket.Serve(l); err != nil && err != http.ErrServerClosed {
+ s.Fatalf("%v", err)
+ }
+ s.Logf("Stopped serving {{ humanize .Name }} at unix://%s", s.SocketPath)
+ }(s.domainSocketL)
+ }
+
+ if s.hasScheme(schemeHTTP) {
+ httpServer := new(http.Server)
+ httpServer.MaxHeaderBytes = int(s.MaxHeaderSize)
+ httpServer.ReadTimeout = s.ReadTimeout
+ httpServer.WriteTimeout = s.WriteTimeout
+ httpServer.SetKeepAlivesEnabled(int64(s.KeepAlive) > 0)
+ if s.ListenLimit > 0 {
+ s.httpServerL = netutil.LimitListener(s.httpServerL, s.ListenLimit)
+ }
+
+ if int64(s.CleanupTimeout) > 0 {
+ httpServer.IdleTimeout = s.CleanupTimeout
+ }
+
+ httpServer.Handler = s.handler
+
+ configureServer(httpServer, "http", s.httpServerL.Addr().String())
+
+ servers = append(servers, httpServer)
+ wg.Add(1)
+ s.Logf("Serving {{ humanize .Name }} at http://%s", s.httpServerL.Addr())
+ go func(l net.Listener) {
+ defer wg.Done()
+ if err := httpServer.Serve(l); err != nil && err != http.ErrServerClosed {
+ s.Fatalf("%v", err)
+ }
+ s.Logf("Stopped serving {{ humanize .Name }} at http://%s", l.Addr())
+ }(s.httpServerL)
+ }
+
+ if s.hasScheme(schemeHTTPS) {
+ httpsServer := new(http.Server)
+ httpsServer.MaxHeaderBytes = int(s.MaxHeaderSize)
+ httpsServer.ReadTimeout = s.TLSReadTimeout
+ httpsServer.WriteTimeout = s.TLSWriteTimeout
+ httpsServer.SetKeepAlivesEnabled(int64(s.TLSKeepAlive) > 0)
+ if s.TLSListenLimit > 0 {
+ s.httpsServerL = netutil.LimitListener(s.httpsServerL, s.TLSListenLimit)
+ }
+ if int64(s.CleanupTimeout) > 0 {
+ httpsServer.IdleTimeout = s.CleanupTimeout
+ }
+ httpsServer.Handler = s.handler
+
+ // Inspired by https://blog.bracebin.com/achieving-perfect-ssl-labs-score-with-go
+ httpsServer.TLSConfig = &tls.Config{
+ // Causes servers to use Go's default ciphersuite preferences,
+ // which are tuned to avoid attacks. Does nothing on clients.
+ PreferServerCipherSuites: true,
+ // Only use curves which have assembly implementations
+ // https://github.com/golang/go/tree/master/src/crypto/elliptic
+ CurvePreferences: []tls.CurveID{tls.CurveP256},
+ {{- if .UseModernMode }}
+ // Use modern tls mode https://wiki.mozilla.org/Security/Server_Side_TLS#Modern_compatibility
+ NextProtos: []string{"h2", "http/1.1"},
+ // https://www.owasp.org/index.php/Transport_Layer_Protection_Cheat_Sheet#Rule_-_Only_Support_Strong_Protocols
+ MinVersion: tls.VersionTLS12,
+ // These ciphersuites support Forward Secrecy: https://en.wikipedia.org/wiki/Forward_secrecy
+ CipherSuites: []uint16{
+ tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
+ tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
+ tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
+ tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
+ tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,
+ tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,
+ },
+ {{- end }}
+ }
+
+ // build standard config from server options
+ if s.TLSCertificate != "" && s.TLSCertificateKey != "" {
+ httpsServer.TLSConfig.Certificates = make([]tls.Certificate, 1)
+ httpsServer.TLSConfig.Certificates[0], err = tls.LoadX509KeyPair({{ if .UseGoStructFlags }}string({{ end }}s.TLSCertificate{{ if .UseGoStructFlags }}){{ end }}, {{ if .UseGoStructFlags }}string({{ end }}s.TLSCertificateKey{{ if .UseGoStructFlags }}){{ end }})
+ if err != nil {
+ return err
+ }
+ }
+
+ if s.TLSCACertificate != "" {
+ // include specified CA certificate
+ caCert, caCertErr := os.ReadFile({{ if .UseGoStructFlags }}string({{ end }}s.TLSCACertificate{{ if .UseGoStructFlags }}){{ end }})
+ if caCertErr != nil {
+ return caCertErr
+ }
+ caCertPool := x509.NewCertPool()
+ ok := caCertPool.AppendCertsFromPEM(caCert)
+ if !ok {
+				return errors.New("cannot parse CA certificate")
+ }
+ httpsServer.TLSConfig.ClientCAs = caCertPool
+ httpsServer.TLSConfig.ClientAuth = tls.RequireAndVerifyClientCert
+ }
+
+ // call custom TLS configurator
+ configureTLS(httpsServer.TLSConfig)
+
+ if len(httpsServer.TLSConfig.Certificates) == 0 && httpsServer.TLSConfig.GetCertificate == nil {
+ // after standard and custom config are passed, this ends up with no certificate
+ if s.TLSCertificate == "" {
+ if s.TLSCertificateKey == "" {
+ s.Fatalf("the required flags `--tls-certificate` and `--tls-key` were not specified")
+ }
+ s.Fatalf("the required flag `--tls-certificate` was not specified")
+ }
+ if s.TLSCertificateKey == "" {
+ s.Fatalf("the required flag `--tls-key` was not specified")
+ }
+ // this happens with a wrong custom TLS configurator
+ s.Fatalf("no certificate was configured for TLS")
+ }
+
+ configureServer(httpsServer, "https", s.httpsServerL.Addr().String())
+
+ servers = append(servers, httpsServer)
+ wg.Add(1)
+ s.Logf("Serving {{ humanize .Name }} at https://%s", s.httpsServerL.Addr())
+ go func(l net.Listener) {
+ defer wg.Done()
+ if err := httpsServer.Serve(l); err != nil && err != http.ErrServerClosed {
+ s.Fatalf("%v", err)
+ }
+ s.Logf("Stopped serving {{ humanize .Name }} at https://%s", l.Addr())
+ }(tls.NewListener(s.httpsServerL, httpsServer.TLSConfig))
+ }
+
+ wg.Add(1)
+ go s.handleShutdown(wg, &servers)
+
+ wg.Wait()
+ return nil
+}
+
+// Listen creates the listeners for the server
+func (s *Server) Listen() error {
+ if s.hasListeners { // already done this
+ return nil
+ }
+
+ if s.hasScheme(schemeHTTPS) {
+ // Use http host if https host wasn't defined
+ if s.TLSHost == "" {
+ s.TLSHost = s.Host
+ }
+ // Use http listen limit if https listen limit wasn't defined
+ if s.TLSListenLimit == 0 {
+ s.TLSListenLimit = s.ListenLimit
+ }
+ // Use http tcp keep alive if https tcp keep alive wasn't defined
+ if int64(s.TLSKeepAlive) == 0 {
+ s.TLSKeepAlive = s.KeepAlive
+ }
+ // Use http read timeout if https read timeout wasn't defined
+ if int64(s.TLSReadTimeout) == 0 {
+ s.TLSReadTimeout = s.ReadTimeout
+ }
+ // Use http write timeout if https write timeout wasn't defined
+ if int64(s.TLSWriteTimeout) == 0 {
+ s.TLSWriteTimeout = s.WriteTimeout
+ }
+ }
+
+ if s.hasScheme(schemeUnix) {
+ domSockListener, err := net.Listen("unix", string(s.SocketPath))
+ if err != nil {
+ return err
+ }
+ s.domainSocketL = domSockListener
+ }
+
+ if s.hasScheme(schemeHTTP) {
+ listener, err := net.Listen("tcp", net.JoinHostPort(s.Host, strconv.Itoa(s.Port)))
+ if err != nil {
+ return err
+ }
+
+ h, p, err := swag.SplitHostPort(listener.Addr().String())
+ if err != nil {
+ return err
+ }
+ s.Host = h
+ s.Port = p
+ s.httpServerL = listener
+ }
+
+ if s.hasScheme(schemeHTTPS) {
+ tlsListener, err := net.Listen("tcp", net.JoinHostPort(s.TLSHost, strconv.Itoa(s.TLSPort)))
+ if err != nil {
+ return err
+ }
+
+ sh, sp, err := swag.SplitHostPort(tlsListener.Addr().String())
+ if err != nil {
+ return err
+ }
+ s.TLSHost = sh
+ s.TLSPort = sp
+ s.httpsServerL = tlsListener
+ }
+
+ s.hasListeners = true
+ return nil
+}
+
+// Shutdown server and clean up resources
+func (s *Server) Shutdown() error {
+ if atomic.CompareAndSwapInt32(&s.shuttingDown, 0, 1) {
+ close(s.shutdown)
+ }
+ return nil
+}
+
+func (s *Server) handleShutdown(wg *sync.WaitGroup, serversPtr *[]*http.Server) {
+ // wg.Done must occur last, after s.api.ServerShutdown()
+ // (to preserve old behaviour)
+ defer wg.Done()
+
+ <-s.shutdown
+
+ servers := *serversPtr
+
+ ctx, cancel := context.WithTimeout(context.TODO(), s.GracefulTimeout)
+ defer cancel()
+
+ // first execute the pre-shutdown hook
+ s.api.PreServerShutdown()
+
+ shutdownChan := make(chan bool)
+ for i := range servers {
+ server := servers[i]
+ go func() {
+ var success bool
+ defer func() {
+ shutdownChan <- success
+ }()
+ if err := server.Shutdown(ctx); err != nil {
+ // Error from closing listeners, or context timeout:
+ s.Logf("HTTP server Shutdown: %v", err)
+ } else {
+ success = true
+ }
+ }()
+ }
+
+ // Wait until all listeners have successfully shut down before calling ServerShutdown
+ success := true
+ for range servers {
+ success = success && <-shutdownChan
+ }
+ if success {
+ s.api.ServerShutdown()
+ }
+}
+
+// GetHandler returns a handler useful for testing
+func (s *Server) GetHandler() http.Handler {
+ return s.handler
+}
+
+// SetHandler allows for setting a http handler on this server
+func (s *Server) SetHandler(handler http.Handler) {
+ s.handler = handler
+}
+
+// UnixListener returns the domain socket listener
+func (s *Server) UnixListener() (net.Listener, error) {
+ if !s.hasListeners {
+ if err := s.Listen(); err != nil {
+ return nil, err
+ }
+ }
+ return s.domainSocketL, nil
+}
+
+// HTTPListener returns the http listener
+func (s *Server) HTTPListener() (net.Listener, error) {
+ if !s.hasListeners {
+ if err := s.Listen(); err != nil {
+ return nil, err
+ }
+ }
+ return s.httpServerL, nil
+}
+
+// TLSListener returns the https listener
+func (s *Server) TLSListener() (net.Listener, error) {
+ if !s.hasListeners {
+ if err := s.Listen(); err != nil {
+ return nil, err
+ }
+ }
+ return s.httpsServerL, nil
+}
+
+func handleInterrupt(once *sync.Once, s *Server) {
+ once.Do(func(){
+ for range s.interrupt {
+ if s.interrupted {
+ s.Logf("Server already shutting down")
+ continue
+ }
+ s.interrupted = true
+ s.Logf("Shutting down... ")
+ if err := s.Shutdown(); err != nil {
+ s.Logf("HTTP server Shutdown: %v", err)
+ }
+ }
+ })
+}
+
+func signalNotify(interrupt chan<- os.Signal) {
+ signal.Notify(interrupt, syscall.SIGINT, syscall.SIGTERM)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/urlbuilder.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/urlbuilder.gotmpl
new file mode 100644
index 000000000..5d6010c0c
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/urlbuilder.gotmpl
@@ -0,0 +1,213 @@
+{{ define "queryparambuilder" }}
+{{- /* queryparambuilder dispatches to the scalar or slice query-parameter builder depending on .IsArray */ }}
+{{ if not .IsArray }}{{ template "simplequeryparambuilder" . }}{{ else }}{{ template "slicequeryparambuilder" . }}{{ end }}
+{{- end }}
+{{ define "simplequeryparambuilder" }}
+{{- /* emits code that formats a scalar query parameter into a string and sets it on qs when non-empty; nullable params are dereferenced only when non-nil */ }}
+{{ if .IsNullable -}}
+var {{ varname .ID }}Q string
+if {{ .ReceiverName }}.{{ pascalize .ID }} != nil {
+	{{ varname .ID }}Q = {{ if .Formatter }}{{ .Formatter }}(*{{ .ReceiverName }}.{{ pascalize .ID }}){{ else }}{{ if not .IsCustomFormatter }}*{{ end }}{{ .ReceiverName }}.{{ pascalize .ID }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+}
+{{ else }}{{ varname .ID }}Q := {{ if .Formatter }}{{ .Formatter }}({{ .ReceiverName }}.{{ pascalize .ID }}){{ else }}{{ .ReceiverName }}.{{ pascalize .ID }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+{{ end -}}
+if {{ varname .ID }}Q != "" {
+	qs.Set({{ printf "%q" .Name }}, {{ varname .ID }}Q)
+}
+{{ end }}
+{{ define "sliceitemqueryparambuilder" }}
+{{- /* emits code that formats one element of a slice parameter and appends it to the accumulator slice <expr>R when non-empty */ }}
+{{ if .IsNullable -}}
+var {{ .ValueExpression }}S string
+if {{ .ValueExpression }} != nil {
+	{{ .ValueExpression }}S = {{ if .Formatter }}{{ .Formatter }}(*{{ .ValueExpression }}){{ else }}*{{ .ValueExpression }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+}
+{{ else -}}
+{{ .ValueExpression }}S := {{ if .Formatter }}{{ .Formatter }}({{ .ValueExpression }}){{ else }}{{ .ValueExpression }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+{{ end -}}
+if {{ .ValueExpression }}S != "" {
+	{{ .ValueExpression }}R = append({{ .ValueExpression }}R, {{ .ValueExpression }}S)
+}
+{{ end }}
+{{define "slicequeryparambuilder" }}
+{{- /* emits code that collects formatted slice elements into <expr>R, joins them per .CollectionFormat, then either Adds each value (multi) or Sets the first joined value on qs; recurses for nested arrays */ }}
+var {{ .Child.ValueExpression }}R []string
+for _, {{ .Child.ValueExpression }} := range {{ .ValueExpression }} {
+	{{- if not .Child.IsArray }}{{ template "sliceitemqueryparambuilder" .Child }}{{ else }}{{ template "slicequeryparambuilder" .Child }}{{ end -}}
+}
+
+{{ if not .Child.Parent -}}
+{{ varname .ID }} := swag.JoinByFormat({{ .Child.ValueExpression }}R, {{ printf "%q" .CollectionFormat }})
+{{ if eq .CollectionFormat "multi" }}
+for _, qsv := range {{ varname .ID }} {
+	qs.Add({{ printf "%q" .Name }}, qsv)
+}
+{{ else }}
+if len({{ varname .ID }}) > 0 {
+	qsv := {{ varname .ID }}[0]
+	if qsv != "" {
+		qs.Set({{ printf "%q" .Name }}, qsv)
+	}
+}
+{{ end }}
+{{ else -}}
+{{- /* nested level: append the joined representation to the parent accumulator instead of touching qs */ }}
+{{ .ValueExpression }}S := swag.JoinByFormat({{ .Child.ValueExpression }}R, {{ printf "%q" .CollectionFormat }})
+if len({{ .ValueExpression }}S) > 0 {
+	{{ .ValueExpression }}Ss := {{ .ValueExpression }}S[0]
+	if {{ .ValueExpression }}Ss != "" {
+		{{ .ValueExpression }}R = append({{ .ValueExpression }}R, {{ .ValueExpression }}Ss)
+	}
+}
+{{ end -}}
+{{ end -}}
+{{ define "slicepathparambuilder" }}
+{{- /* like slicequeryparambuilder but substitutes the joined value into the _path placeholder; an empty value is an error since path params are required */ }}
+var {{ .Child.ValueExpression }}R []string
+for _, {{ .Child.ValueExpression }} := range {{ .ValueExpression }} {
+	{{- if not .Child.IsArray }}{{ template "sliceitemqueryparambuilder" .Child }}{{ else }}{{ template "slicepathparambuilder" .Child }}{{ end -}}
+}
+
+{{ if not .Child.Parent -}}
+{{ varname .ID }} := swag.JoinByFormat({{ .Child.ValueExpression }}R, {{ printf "%q" .CollectionFormat }})
+if len({{ varname .ID }}) > 0 {
+	psv := {{ varname .ID }}[0]
+	if psv != "" {
+		_path = strings.Replace(_path, "{{ printf "{%s}" .Name }}", psv, -1)
+	} else {
+		return nil, errors.New("{{ camelize .ID }} is required on {{ pascalize $.Name }}URL")
+	}
+}
+{{ else -}}
+{{- /* nested level: append the joined representation to the parent accumulator */ }}
+{{ .ValueExpression }}S := swag.JoinByFormat({{ .Child.ValueExpression }}R, {{ printf "%q" .CollectionFormat }})
+if len({{ .ValueExpression }}S) > 0 {
+	{{ .ValueExpression }}Ss := {{ .ValueExpression }}S[0]
+	if {{ .ValueExpression }}Ss != "" {
+		{{ .ValueExpression }}R = append({{ .ValueExpression }}R, {{ .ValueExpression }}Ss)
+	}
+}
+{{ end -}}
+{{ end }}
+{{- /* main template: generates the <Operation>URL builder type with Build/BuildFull/String/StringFull helpers */ }}
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the generate command
+
+import (
+	"fmt"
+	"errors"
+	"net/url"
+	golangswaggerpaths "path"
+	"strings"
+
+	"github.com/go-openapi/strfmt"
+	"github.com/go-openapi/swag"
+)
+
+// {{ pascalize .Name }}URL generates an URL for the {{ humanize .Name }} operation
+type {{ pascalize .Name }}URL struct {
+	{{ range .PathParams }}
+	{{ pascalize .ID }} {{.GoType}}
+	{{- end }}
+	{{ range .QueryParams }}
+	{{ pascalize .ID }} {{ if and (not .IsArray) .IsNullable }}*{{ end }}{{.GoType}}
+	{{- end }}
+
+	_basePath string
+	{{ if or (gt (len .PathParams ) 0) (gt (len .QueryParams) 0) -}}
+	// avoid unkeyed usage
+	_ struct{}
+	{{- end }}
+
+}
+
+// WithBasePath sets the base path for this url builder, only required when it's different from the
+// base path specified in the swagger spec.
+// When the value of the base path is an empty string
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) WithBasePath(bp string) *{{pascalize .Name}}URL {
+	{{ .ReceiverName }}.SetBasePath(bp)
+	return {{ .ReceiverName }}
+}
+
+
+// SetBasePath sets the base path for this url builder, only required when it's different from the
+// base path specified in the swagger spec.
+// When the value of the base path is an empty string
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) SetBasePath(bp string) {
+	{{ .ReceiverName }}._basePath = bp
+}
+
+// Build a url path and query string
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) Build() (*url.URL, error) {
+	var _result url.URL
+
+	var _path = {{ printf "%q" .Path }}
+	{{ range .PathParams }}{{ if .IsArray }}
+	{{ template "slicepathparambuilder" . -}}
+	{{ else }}
+	{{ varname .ID }} := {{ if .Formatter }}{{ .Formatter }}({{ .ReceiverName }}.{{ pascalize .ID }}){{ else }}{{ .ReceiverName }}.{{ pascalize .ID }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+	if {{ varname .ID }} != "" {
+		_path = strings.Replace(_path, "{{ printf "{%s}" .Name }}", {{ varname .ID }}, -1)
+	} else {
+		return nil, errors.New("{{ camelize .ID }} is required on {{ pascalize $.Name }}URL")
+	}
+	{{ end }}
+
+	{{- end }}
+	_basePath := {{ .ReceiverName }}._basePath
+	{{ if .BasePath }}if _basePath == "" {
+		_basePath = {{ printf "%q" .BasePath }}
+	}
+	{{ end -}}
+	_result.Path = golangswaggerpaths.Join(_basePath, _path)
+
+	{{ if gt (len .QueryParams) 0 -}}
+	qs := make(url.Values)
+	{{ range .QueryParams }}
+	{{ template "queryparambuilder" . -}}
+	{{- end }}
+	_result.RawQuery = qs.Encode()
+	{{- end }}
+
+	return &_result, nil
+}
+
+// Must is a helper function to panic when the url builder returns an error
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) Must(u *url.URL, err error) *url.URL {
+	if err != nil {
+		panic(err)
+	}
+	if u == nil {
+		panic("url can't be nil")
+	}
+	return u
+}
+
+// String returns the string representation of the path with query string
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) String() string {
+	return {{ .ReceiverName }}.Must({{ .ReceiverName }}.Build()).String()
+}
+
+// BuildFull builds a full url with scheme, host, path and query string
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) BuildFull(scheme, host string) (*url.URL, error) {
+	if scheme == "" {
+		return nil, errors.New("scheme is required for a full url on {{ pascalize .Name }}URL")
+	}
+	if host == "" {
+		return nil, errors.New("host is required for a full url on {{ pascalize .Name }}URL")
+	}
+
+	base, err := {{ .ReceiverName }}.Build()
+	if err != nil {
+		return nil, err
+	}
+
+	base.Scheme = scheme
+	base.Host = host
+	return base, nil
+}
+
+// StringFull returns the string representation of a complete url
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) StringFull(scheme, host string) string {
+	return {{ .ReceiverName }}.Must( {{ .ReceiverName }}.BuildFull(scheme, host)).String()
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsinit.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsinit.gotmpl
new file mode 100644
index 000000000..a42f1cf2f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsinit.gotmpl
@@ -0,0 +1,23 @@
+{{- /* defaultsinit: emits init-time code that populates <id>Default from the spec default; custom formatters use UnmarshalText, everything else falls back to json.Unmarshal. Panics on an invalid spec default. */ }}
+{{- if and .IsPrimitive .IsCustomFormatter (not (stringContains .Zero "(\"")) }}
+	if err := {{ varname .ID}}Default.UnmarshalText([]byte({{ printf "%q" .Default }})) ; err != nil {
+		msg := fmt.Sprintf("invalid default value for {{ varname .ID }}: %v",err)
+		panic(msg) // panics if the specification is invalid
+	}
+{{- else if .IsArray }}
+	{{- if or ( and .Child.IsPrimitive .Child.IsCustomFormatter ) .Child.IsArray }}
+	if err := json.Unmarshal([]byte(`{{printf "%s" (json .Default)}}`), &{{ varname .ID }}Default); err != nil {
+		msg := fmt.Sprintf("invalid default value for {{ varname .ID }}: %v",err)
+		panic(msg) // panics if the specification is invalid
+	}
+	{{- else if and (not .Child.IsPrimitive) (not .Child.IsArray) }} {{/* shouldn't get there: guard */}}
+	if err := json.Unmarshal([]byte(`{{printf "%s" (json .Default)}}`), &{{ varname .ID }}Default); err != nil {
+		msg := fmt.Sprintf("invalid default value for {{ varname .ID }}: %v",err)
+		panic(msg) // panics if the specification is invalid
+	}
+	{{- end }}
+{{- else if not .IsPrimitive }} {{/* shouldn't get there: guard (defaults to letting json figure out) */}}
+	if err := json.Unmarshal([]byte(`{{printf "%s" (json .Default)}}`), &{{ varname .ID }}Default); err != nil {
+		msg := fmt.Sprintf("invalid default value for {{ varname .ID }}: %v",err)
+		panic(msg) // panics if the specification is invalid
+	}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsvar.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsvar.gotmpl
new file mode 100644
index 000000000..cfb9f80e6
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsvar.gotmpl
@@ -0,0 +1,29 @@
+{{- /* defaultsvar: emits the declaration/initializer for <id>Default; strfmt types needing UnmarshalText start at their zero value and are filled in by defaultsinit */ }}
+{{- varname .ID}}Default
+{{- if .IsPrimitive }}
+	{{- print " " }}={{ print " " }}
+	{{- if .IsCustomFormatter }}
+		{{- if stringContains .Zero "(\"" }}
+			{{- .GoType }}({{ printf "%#v" .Default }}){{/* strfmt type initializer that takes string */}}
+		{{- else }}
+			{{- .Zero }}{{/* strfmt type initializer that requires UnmarshalText(), e.g. Date, Datetime, Duration */}}
+		{{- end }}
+	{{- else }}
+		{{- .GoType }}({{ printf "%#v" .Default }}){{/* regular go primitive type initializer */}}
+	{{- end }}
+{{- else if .IsArray }}{{/* do not initialize from possible defaults in nested arrays */}}
+	{{- if .Child.IsPrimitive }}
+		{{- print " " }}={{ print " " }}
+		{{- if .Child.IsCustomFormatter }}
+			{{- .Zero }}{{/* initialization strategy with UnmarshalText() */}}
+		{{- else }}
+			{{- .GoType }}{{ arrayInitializer .Default }}{{/* regular go primitive type initializer: simple slice initializer */}}
+		{{- end }}
+	{{- else if .Child.IsArray }}
+		{{- print " " }}={{ print " " }}
+		{{- .Zero }}{{/* initialization strategy with json.Unmarshal() */}}
+	{{- else }}
+		{{- print " " }}{{ .GoType }}{{/* shouldn't have that: simple schema is either primitive or array */}}
+	{{- end }}
+{{- else }}
+	{{- print " " }}{{ .GoType }}{{/* shouldn't have that: simple schema is either primitive or array */}}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/structfield.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/structfield.gotmpl
new file mode 100644
index 000000000..c8e235dc3
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/structfield.gotmpl
@@ -0,0 +1,41 @@
+{{ define "structfield" }}
+{{- /* structfield: public struct field with doc comment, validation doc and serializer tags */ }}
+	{{- if not $.IsBaseType -}}
+	// {{ template "docstring" . }}
+	{{- template "propertyValidationDocString" .}}
+	{{- end}}
+{{ pascalize .Name}} {{ template "schemaType" . }} {{ .PrintTags }}
+{{ end }}
+
+{{- define "tuplefield" }}
+{{- /* tuplefield: tuple element field, excluded from default JSON handling (custom serializer) */ }}
+	{{- if not $.IsBaseType -}}
+	// {{ template "docstring" . }}
+	{{- template "propertyValidationDocString" .}}
+{{ end }}
+{{- pascalize .Name}} {{ template "schemaType" . }} `json:"-"
+{{- if .CustomTag }} {{ .CustomTag }}{{ end }}` // custom serializer
+{{ end }}
+
+{{- define "structfieldIface" }}
+{{- /* structfieldIface: getter/setter pair on the interface generated for base types */ }}
+	{{- if not $.IsBaseType -}}
+	// {{ template "docstring" . }}
+	{{- template "propertyValidationDocString" .}}
+	{{- end }}
+{{ pascalize .Name}}() {{ template "schemaType" . }}
+Set{{ pascalize .Name}}({{ template "schemaType" . }})
+{{ end }}
+{{ define "tuplefieldIface" }}
+{{- /* tuplefieldIface: getter/setter pair for a tuple element on an interface */ }}
+	{{- if not $.IsBaseType -}}
+	// {{ template "docstring" . }}
+	{{- template "propertyValidationDocString" . }}
+{{ end }}
+{{- pascalize .Name}}() {{ template "schemaType" . }}
+Set{{ pascalize .Name}}({{ template "schemaType" . }})
+{{ end }}
+
+{{- define "privstructfield" }}
+	{{- camelize .Name}}Field {{ template "schemaType" . }}
+{{ end }}
+
+{{- define "privtuplefield" }}
+	{{- camelize .Name}}Field {{ template "schemaType" . }}
+{{ end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/swagger_json_embed.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/swagger_json_embed.gotmpl
new file mode 100644
index 000000000..5bb9f1a4e
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/swagger_json_embed.gotmpl
@@ -0,0 +1,30 @@
+{{- /* swagger_json_embed: generates embedded_spec.go exposing the raw and flattened swagger documents as json.RawMessage */ }}
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .APIPackage }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+	"encoding/json"
+
+	{{ imports .DefaultImports }}
+	{{ imports .Imports }}
+)
+
+
+var (
+	// SwaggerJSON embedded version of the swagger document used at generation time
+	SwaggerJSON json.RawMessage
+	// FlatSwaggerJSON embedded flattened version of the swagger document used at generation time
+	FlatSwaggerJSON json.RawMessage
+)
+
+func init() {
+	SwaggerJSON = json.RawMessage([]byte(`{{ .SwaggerJSON }}`))
+	FlatSwaggerJSON = json.RawMessage([]byte(`{{ .FlatSwaggerJSON }}`))
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/customformat.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/customformat.gotmpl
new file mode 100644
index 000000000..354075a90
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/customformat.gotmpl
@@ -0,0 +1,3 @@
+{{- /* customformat: emits a validate.FormatOf call for a strfmt-backed value */ }}
+if err := validate.FormatOf({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ printf "%q" .SwaggerFormat }}, {{ .ToString }}, formats); err != nil {
+	return err
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/maximum.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/maximum.gotmpl
new file mode 100644
index 000000000..993f7344f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/maximum.gotmpl
@@ -0,0 +1,23 @@
+{{- /* maximum: picks the int64/uint64/float64 flavour of validate.Maximum* based on the underlying go type, converting (and dereferencing nullable values) as needed */ }}
+{{- if hasPrefix .UnderlyingType "int" }}
+	{{- if and (hasPrefix .UnderlyingType "int64") (not .IsAliased) }}
+if err := validate.MaximumInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil {
+	{{- else }}
+if err := validate.MaximumInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, int64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil {
+	{{- end }}
+{{- else }}
+	{{- if hasPrefix .UnderlyingType "uint" }}
+		{{- if and (hasPrefix .UnderlyingType "uint64") (not .IsAliased) }}
+if err := validate.MaximumUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil {
+		{{- else }}
+if err := validate.MaximumUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, uint64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil {
+		{{- end }}
+	{{- else }}
+		{{- if and (eq .UnderlyingType "float64") (not .IsAliased) }}
+if err := validate.Maximum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil {
+		{{- else }}
+if err := validate.Maximum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, float64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil {
+		{{- end }}
+	{{- end }}
+{{- end }}
+	return err
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/minimum.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/minimum.gotmpl
new file mode 100644
index 000000000..626c207cb
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/minimum.gotmpl
@@ -0,0 +1,23 @@
+{{- /* minimum: picks the int64/uint64/float64 flavour of validate.Minimum* based on the underlying go type, converting (and dereferencing nullable values) as needed */ }}
+{{- if hasPrefix .UnderlyingType "int" }}
+	{{- if and (hasPrefix .UnderlyingType "int64") (not .IsAliased) }}
+if err := validate.MinimumInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil {
+	{{- else }}
+if err := validate.MinimumInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, int64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil {
+	{{- end }}
+{{- else }}
+	{{- if hasPrefix .UnderlyingType "uint" }}
+		{{- if and (hasPrefix .UnderlyingType "uint64") (not .IsAliased) }}
+if err := validate.MinimumUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil {
+		{{- else }}
+if err := validate.MinimumUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, uint64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil {
+		{{- end }}
+	{{- else }}
+		{{- if and (eq .UnderlyingType "float64") (not .IsAliased) }}
+if err := validate.Minimum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil {
+		{{- else }}
+if err := validate.Minimum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, float64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil {
+		{{- end }}
+	{{- end }}
+{{- end }}
+	return err
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/multipleOf.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/multipleOf.gotmpl
new file mode 100644
index 000000000..28796852d
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/multipleOf.gotmpl
@@ -0,0 +1,23 @@
+{{- /* multipleOf: picks the int64/uint64/float64 flavour of validate.MultipleOf*; a non-integer factor on an integer type falls back to the float64 validator */ }}
+{{- if and (hasPrefix .UnderlyingType "int") (isInteger .MultipleOf) }}{{/* if the type is an integer, but the multiple factor is not, fall back to the float64 version of the validator */}}
+	{{- if and (hasPrefix .UnderlyingType "int64") (not .IsAliased) }}
+if err := validate.MultipleOfInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.MultipleOf }}); err != nil {
+	{{- else }}
+if err := validate.MultipleOfInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, int64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.MultipleOf }}); err != nil {
+	{{- end }}
+{{- else }}
+	{{- if and (hasPrefix .UnderlyingType "uint") (isInteger .MultipleOf) }}
+		{{- if and (hasPrefix .UnderlyingType "uint64") (not .IsAliased) }}
+if err := validate.MultipleOfUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.MultipleOf }}); err != nil {
+		{{- else }}
+if err := validate.MultipleOfUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, uint64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.MultipleOf }}); err != nil {
+		{{- end }}
+	{{- else }}
+		{{- if and (eq .UnderlyingType "float64") (not .IsAliased) }}
+if err := validate.MultipleOf({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.MultipleOf }}); err != nil {
+		{{- else }}
+if err := validate.MultipleOf({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, float64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.MultipleOf }}); err != nil {
+		{{- end }}
+	{{- end }}
+{{- end }}
+	return err
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/primitive.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/primitive.gotmpl
new file mode 100644
index 000000000..35238d784
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/primitive.gotmpl
@@ -0,0 +1,29 @@
+{{- /* primitive: emits all applicable primitive validations (length, pattern, numeric bounds, enum) for one value */ }}
+{{if .MinLength}}
+if err := validate.MinLength({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MinLength}}); err != nil {
+	return err
+}
+{{end}}
+{{if .MaxLength}}
+if err := validate.MaxLength({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MaxLength}}); err != nil {
+	return err
+}
+{{end}}
+{{if .Pattern}}
+if err := validate.Pattern({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .ToString }}, `{{escapeBackticks .Pattern}}`); err != nil {
+	return err
+}
+{{end}}
+{{if .Minimum}}
+	{{ template "validationMinimum" . }}
+{{end}}
+{{if .Maximum}}
+	{{ template "validationMaximum" . }}
+{{end}}
+{{if .MultipleOf}}
+	{{ template "validationMultipleOf" . }}
+{{end}}
+{{if .Enum}}
+if err := validate.EnumCase({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) .IsNullable }}*{{ end }}{{.ValueExpression}}{{ if .IsCustomFormatter }}.String(){{ end }}, {{ printf "%#v" .Enum}}, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+	return err
+}
+{{end}}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/structfield.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/structfield.gotmpl
new file mode 100644
index 000000000..8378c4615
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/structfield.gotmpl
@@ -0,0 +1,62 @@
+{{ define "propertyValidationDocString" }}
+{{- /* propertyValidationDocString: emits one "// Constraint: value" doc-comment line per validation present on the property */ }}
+
+{{- if .Required }}
+// Required: true
+{{- end }}
+
+{{- if .ReadOnly }}
+// Read Only: true
+{{- end }}
+
+{{- if .Maximum }}
+// Maximum: {{ if .ExclusiveMaximum }}< {{ end }}{{ .Maximum }}
+{{- end }}
+
+{{- if .Minimum }}
+// Minimum: {{ if .ExclusiveMinimum }}> {{ end }}{{ .Minimum }}
+{{- end }}
+
+{{- if .MultipleOf }}
+// Multiple Of: {{ .MultipleOf }}
+{{- end }}
+
+{{- if .MaxLength }}
+// Max Length: {{ .MaxLength }}
+{{- end }}
+
+{{- if .MinLength }}
+// Min Length: {{ .MinLength }}
+{{- end }}
+
+{{- if .Pattern }}
+// Pattern: {{ .Pattern }}
+{{- end }}
+
+{{- if .MaxItems }}
+// Max Items: {{ .MaxItems }}
+{{- end }}
+
+{{- if .MinItems }}
+// Min Items: {{ .MinItems }}
+{{- end }}
+
+{{- if .MinProperties }}
+// Min Properties: {{ .MinProperties }}
+{{- end }}
+
+{{- if .MaxProperties }}
+// Max Properties: {{ .MaxProperties }}
+{{- end }}
+
+{{- if .UniqueItems }}
+// Unique: true
+{{- end }}
+
+{{- if .IsCustomFormatter }}
+// Format: {{ .SwaggerFormat }}
+{{- end }}
+
+{{- if .Enum }}
+// Enum: {{ printf "%v" .Enum }}
+{{- end }}
+{{- end}}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/types.go b/vendor/github.com/go-swagger/go-swagger/generator/types.go
new file mode 100644
index 000000000..d2a6a4f5e
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/types.go
@@ -0,0 +1,1284 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "fmt"
+ "log"
+ "path/filepath"
+ "reflect"
+ "strings"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+ "github.com/kr/pretty"
+ "github.com/mitchellh/mapstructure"
+)
+
+// Names of swagger types and formats as they appear in specifications,
+// used throughout type resolution.
+const (
+	iface = "interface{}"
+	array = "array"
+	file = "file"
+	number = "number"
+	integer = "integer"
+	boolean = "boolean"
+	str = "string"
+	object = "object"
+	binary = "binary"
+	body = "body"
+	b64 = "byte"
+)
+
+// Extensions supported by go-swagger
+const (
+	xClass = "x-class" // class name used by discriminator
+	xGoCustomTag = "x-go-custom-tag" // additional tag for serializers on struct fields
+	xGoName = "x-go-name" // name of the generated go variable
+	xGoType = "x-go-type" // reuse existing type (do not generate)
+	xIsNullable = "x-isnullable" // presumably a legacy spelling of x-nullable — confirm
+	xNullable = "x-nullable" // turns the schema into a pointer
+	xOmitEmpty = "x-omitempty" // controls omitempty in generated struct tags — TODO confirm
+	xSchemes = "x-schemes" // additional schemes supported for operations (server generation)
+	xOrder = "x-order" // sort order for properties (or any schema)
+	xGoJSONString = "x-go-json-string" // NOTE(review): semantics not visible here — verify before documenting further
+	xGoEnumCI = "x-go-enum-ci" // make string enumeration case-insensitive
+
+	xGoOperationTag = "x-go-operation-tag" // additional tag to override generation in operation groups
+)
+
+// swaggerTypeName contains a mapping from go type to swagger type or format
+var swaggerTypeName map[string]string
+
+// initTypes builds swaggerTypeName as the reverse lookup of typeMapping
+// (go type name -> swagger type name).
+func initTypes() {
+	swaggerTypeName = make(map[string]string)
+	for k, v := range typeMapping {
+		swaggerTypeName[v] = k
+	}
+}
+
+// simpleResolvedType resolves a non-schema swagger (type, format) pair into a
+// resolvedType, recursing into items for arrays. Validations are guarded via
+// deferred calls so the checks run on the final result regardless of which
+// branch returned.
+func simpleResolvedType(tn, fmt string, items *spec.Items, v *spec.CommonValidations) (result resolvedType) {
+	result.SwaggerType = tn
+	result.SwaggerFormat = fmt
+
+	defer func() {
+		guardValidations(result.SwaggerType, v)
+	}()
+
+	if tn == file {
+		// special case of swagger type "file", rendered as io.ReadCloser interface
+		result.IsPrimitive = true
+		result.GoType = formatMapping[str][binary]
+		result.IsStream = true
+		return
+	}
+
+	if fmt != "" {
+		defer func() {
+			guardFormatConflicts(result.SwaggerFormat, v)
+		}()
+
+		// normalize format names such as "date-time" to the mapping key form
+		fmtn := strings.ReplaceAll(fmt, "-", "")
+		if fmm, ok := formatMapping[tn]; ok {
+			if tpe, ok := fmm[fmtn]; ok {
+				result.GoType = tpe
+				result.IsPrimitive = true
+				_, result.IsCustomFormatter = customFormatters[tpe]
+				// special case of swagger format "binary", rendered as io.ReadCloser interface
+				// TODO(fredbi): should set IsCustomFormatter=false when binary
+				result.IsStream = fmt == binary
+				// special case of swagger format "byte", rendered as a strfmt.Base64 type: no validation
+				result.IsBase64 = fmt == b64
+				return
+			}
+		}
+	}
+
+	if tpe, ok := typeMapping[tn]; ok {
+		result.GoType = tpe
+		_, result.IsPrimitive = primitives[tpe]
+		// NOTE(review): the next line overwrites the primitives lookup above
+		// with ok, which is always true inside this branch — confirm intended.
+		result.IsPrimitive = ok
+		return
+	}
+
+	if tn == array {
+		result.IsArray = true
+		result.IsPrimitive = false
+		result.IsCustomFormatter = false
+		result.IsNullable = false
+		if items == nil {
+			// untyped array items: fall back to []interface{}
+			result.GoType = "[]" + iface
+			return
+		}
+		// resolve the element type recursively and wrap it in a slice
+		res := simpleResolvedType(items.Type, items.Format, items.Items, &items.CommonValidations)
+		result.GoType = "[]" + res.GoType
+		return
+	}
+	// unknown swagger type: pass the name through unchanged
+	result.GoType = tn
+	_, result.IsPrimitive = primitives[tn]
+	return
+}
+
+// newTypeResolver builds a typeResolver for the given models package and
+// document, pre-populating KnownDefs with the resolved go type of every
+// definition in the spec.
+func newTypeResolver(pkg, _ string, doc *loads.Document) *typeResolver {
+	resolver := typeResolver{ModelsPackage: pkg, Doc: doc}
+	resolver.KnownDefs = make(map[string]struct{}, len(doc.Spec().Definitions))
+	for k, sch := range doc.Spec().Definitions {
+		tpe, _, _ := resolver.knownDefGoType(k, sch, nil)
+		resolver.KnownDefs[tpe] = struct{}{}
+	}
+	return &resolver
+}
+
+// knownDefGoType returns go type, package and package alias for definition.
+// x-go-name overrides the definition name; an x-go-type extension (unless
+// embedded) trumps regular resolution and yields the external type instead.
+// The optional clear function post-processes the resolved name (e.g. mangling).
+func (t typeResolver) knownDefGoType(def string, schema spec.Schema, clear func(string) string) (string, string, string) {
+	debugLog("known def type: %q", def)
+	ext := schema.Extensions
+	nm, hasGoName := ext.GetString(xGoName)
+
+	if hasGoName {
+		debugLog("known def type %s named from %s as %q", def, xGoName, nm)
+		def = nm
+	}
+	extType, isExternalType := t.resolveExternalType(ext)
+	if !isExternalType || extType.Embedded {
+		if clear == nil {
+			debugLog("known def type no clear: %q", def)
+			return def, t.definitionPkg, ""
+		}
+		debugLog("known def type clear: %q -> %q", def, clear(def))
+		return clear(def), t.definitionPkg, ""
+	}
+
+	// external type definition trumps regular type resolution
+	if extType.Import.Alias == "" {
+		debugLog("type %s imported as external type %s, assumed in current package", def, extType.Type)
+		return extType.Type, extType.Import.Package, extType.Import.Alias
+	}
+	debugLog("type %s imported as external type from %s as %s.%s", def, extType.Import.Package, extType.Import.Alias, extType.Type)
+	return extType.Import.Alias + "." + extType.Type, extType.Import.Package, extType.Import.Alias
+}
+
+// x-go-type:
+//
+//	type: mytype
+//	import:
+//	  package:
+//	  alias:
+//	hints:
+//	  kind: map|object|array|interface|primitive|stream|tuple
+//	  nullable: true|false
+//	embedded: true
+//
+// externalTypeDefinition is the decoded form of the x-go-type extension,
+// describing a pre-existing go type to use instead of generating one.
+type externalTypeDefinition struct {
+	Type string // go type name to reuse
+	Import struct {
+		Package string // fully qualified package path of the type
+		Alias string // import alias; derived from the package when empty
+	}
+	Hints struct {
+		Kind string // shape hint: map|object|array|interface|primitive|stream|tuple
+		Nullable *bool // overrides nullability when set
+		NoValidation *bool // skips validation generation when set
+	}
+	Embedded bool // embed the external type rather than replacing generation
+}
+
+// hasExternalType decodes the x-go-type extension, if present, into an
+// externalTypeDefinition. It returns (nil, false) when the extension is
+// absent or cannot be decoded; a decode failure is logged as a warning
+// rather than propagated, so a malformed extension is simply skipped.
+func hasExternalType(ext spec.Extensions) (*externalTypeDefinition, bool) {
+	v, ok := ext[xGoType]
+	if !ok {
+		return nil, false
+	}
+
+	var extType externalTypeDefinition
+	// scope err to the check: idiomatic if-initializer form
+	if err := mapstructure.Decode(v, &extType); err != nil {
+		log.Printf("warning: x-go-type extension could not be decoded (%v). Skipped", v)
+		return nil, false
+	}
+
+	return &extType, true
+}
+
+// resolveExternalType decodes the x-go-type extension and normalizes its
+// import package/alias pair: a missing alias is derived (and deconflicted)
+// from the package name, and a missing package defaults to the models package.
+// Returns (nil, false) when no usable extension is present.
+func (t typeResolver) resolveExternalType(ext spec.Extensions) (*externalTypeDefinition, bool) {
+	extType, hasExt := hasExternalType(ext)
+	if !hasExt {
+		return nil, false
+	}
+
+	// NOTE:
+	// * basic deconfliction of the default alias
+	// * if no package is specified, defaults to models (as provided from CLI or defaut generation location for models)
+	toAlias := func(pkg string) string {
+		mangled := GoLangOpts().ManglePackageName(pkg, "")
+		return deconflictPkg(mangled, func(in string) string {
+			return in + "ext"
+		})
+	}
+
+	switch {
+	case extType.Import.Package != "" && extType.Import.Alias == "":
+		extType.Import.Alias = toAlias(extType.Import.Package)
+	case extType.Import.Package == "" && extType.Import.Alias != "":
+		extType.Import.Package = t.ModelsFullPkg
+	case extType.Import.Package == "" && extType.Import.Alias == "":
+		// in this case, the external type is assumed to be present in the current package.
+		// For completion, whenever this type is used in anonymous types declared by operations,
+		// we assume this is the package where models are expected to be found.
+		extType.Import.Package = t.ModelsFullPkg
+		if extType.Import.Package != "" {
+			extType.Import.Alias = toAlias(extType.Import.Package)
+		}
+	}
+
+	debugLogAsJSON("known def external %s type", xGoType, extType)
+
+	return extType, true
+}
+
+// typeResolver resolves swagger schemas from a loaded document into go types,
+// tracking the models package and the set of already-known definitions.
+type typeResolver struct {
+	Doc *loads.Document
+	ModelsPackage string // package alias (e.g. "models")
+	ModelsFullPkg string // fully qualified package (e.g. "github.com/example/models")
+	ModelName string
+	KnownDefs map[string]struct{}
+	// unexported fields
+	keepDefinitionsPkg string
+	knownDefsKept map[string]struct{}
+	definitionPkg string // pkg alias to fill in GenSchema.Pkg
+}
+
+// NewWithModelName clones a type resolver and specifies a new model name.
+//
+// The clone inherits the kept-definitions configuration of the receiver.
+func (t *typeResolver) NewWithModelName(name string) *typeResolver {
+	clone := newTypeResolver(t.ModelsPackage, t.ModelsFullPkg, t.Doc)
+	clone.ModelName = name
+
+	// propagate kept definitions to the clone
+	clone.keepDefinitionsPkg = t.keepDefinitionsPkg
+	clone.knownDefsKept = t.knownDefsKept
+	clone.definitionPkg = t.definitionPkg
+
+	return clone
+}
+
+// withKeepDefinitionsPackage instructs the type resolver to keep the previously
+// resolved package name for all definitions known at the moment it is called.
+func (t *typeResolver) withKeepDefinitionsPackage(definitionsPackage string) *typeResolver {
+	t.keepDefinitionsPkg = definitionsPackage
+
+	// snapshot the currently known definitions, so definitions learned later
+	// are not affected by the kept package
+	kept := make(map[string]struct{}, len(t.KnownDefs))
+	for def := range t.KnownDefs {
+		kept[def] = struct{}{}
+	}
+	t.knownDefsKept = kept
+
+	return t
+}
+
+// withDefinitionPackage sets the definition package alias that generated
+// object/struct types report in their GenSchema.Pkg field.
+//
+// ModelsPackage cannot replace definitionPkg: ModelsPackage is prepended to
+// .GoType, while definitionPkg is only used to fill in the .Pkg of GenSchema.
+func (t *typeResolver) withDefinitionPackage(pkg string) *typeResolver {
+	t.definitionPkg = pkg
+	return t
+}
+
+// resolveSchemaRef resolves a schema carrying a $ref against the spec document.
+//
+// It reports returns=true whenever the schema actually carries a $ref; in that
+// case result holds the type resolved from the ref target, re-qualified with
+// the go type name/package of the referred definition.
+func (t *typeResolver) resolveSchemaRef(schema *spec.Schema, isRequired bool) (returns bool, result resolvedType, err error) {
+	if schema.Ref.String() == "" {
+		return
+	}
+	debugLog("resolving ref (anon: %t, req: %t) %s", false, isRequired, schema.Ref.String())
+
+	returns = true
+	var ref *spec.Schema
+	var er error
+
+	ref, er = spec.ResolveRef(t.Doc.Spec(), &schema.Ref)
+	if er != nil {
+		debugLog("error resolving ref %s: %v", schema.Ref.String(), er)
+		err = er
+		return
+	}
+
+	extType, isExternalType := t.resolveExternalType(schema.Extensions)
+
+	res, er := t.ResolveSchema(ref, false, isRequired)
+	if er != nil {
+		err = er
+		return
+	}
+	result = res
+
+	if isExternalType {
+		// deal with validations for an aliased external type.
+		// BUG FIX: this assignment used to be made before "result = res" and was
+		// silently clobbered by it; it is now applied to the final result.
+		result.SkipExternalValidation = swag.BoolValue(extType.Hints.NoValidation)
+	}
+
+	// re-qualify the resolved type with the name of the referred definition
+	tn := filepath.Base(schema.Ref.GetURL().Fragment)
+	tpe, pkg, alias := t.knownDefGoType(tn, *ref, t.goTypeName)
+	debugLog("type name %s, package %s, alias %s", tpe, pkg, alias)
+	if tpe != "" {
+		result.GoType = tpe
+		result.Pkg = pkg
+		result.PkgAlias = alias
+	}
+	result.HasDiscriminator = res.HasDiscriminator
+	result.IsBaseType = result.HasDiscriminator
+	result.IsNullable = result.IsNullable || t.isNullable(ref) // this has to be overridden for slices and maps
+	result.IsEnumCI = false
+	return
+}
+
+// inferAliasing marks a named (non-anonymous) resolved type as an alias of its
+// underlying go type, and re-points GoType to the model's own type name.
+func (t *typeResolver) inferAliasing(result *resolvedType, _ *spec.Schema, isAnonymous bool, _ bool) {
+	if isAnonymous || t.ModelName == "" {
+		// anonymous or unnamed schemas cannot be aliased
+		return
+	}
+
+	result.AliasedType = result.GoType
+	result.IsAliased = true
+	result.GoType = t.goTypeName(t.ModelName)
+	result.Pkg = t.definitionPkg
+}
+
+// resolveFormat resolves a schema that declares a swagger "format".
+//
+// It reports returns=true when the format mapped to a known go type
+// (via formatMapping, then typeMapping); result then carries the mapped
+// type, its nullability and custom-formatter status.
+// Conflicting validations for the format are stripped by guardFormatConflicts.
+func (t *typeResolver) resolveFormat(schema *spec.Schema, isAnonymous bool, isRequired bool) (returns bool, result resolvedType, err error) {
+
+	if schema.Format != "" {
+		// defaults to string
+		result.SwaggerType = str
+		if len(schema.Type) > 0 {
+			result.SwaggerType = schema.Type[0]
+		}
+
+		debugLog("resolving format (anon: %t, req: %t)", isAnonymous, isRequired)
+		// normalize the format key: e.g. "date-time" -> "datetime"
+		schFmt := strings.ReplaceAll(schema.Format, "-", "")
+		// first try the (swagger type, format) specific mapping...
+		if fmm, ok := formatMapping[result.SwaggerType]; ok {
+			if tpe, ok := fmm[schFmt]; ok {
+				returns = true
+				result.GoType = tpe
+				_, result.IsCustomFormatter = customFormatters[tpe]
+			}
+		}
+		// ...then fall back to the format-only mapping
+		if tpe, ok := typeMapping[schFmt]; !returns && ok {
+			returns = true
+			result.GoType = tpe
+			_, result.IsCustomFormatter = customFormatters[tpe]
+		}
+
+		result.SwaggerFormat = schema.Format
+		t.inferAliasing(&result, schema, isAnonymous, isRequired)
+		// special case of swagger format "binary", rendered as io.ReadCloser interface and is therefore not a primitive type
+		// TODO: should set IsCustomFormatter=false in this case.
+		result.IsPrimitive = schFmt != binary
+		result.IsStream = schFmt == binary
+		result.IsBase64 = schFmt == b64
+		// propagate extensions in resolvedType
+		result.Extensions = schema.Extensions
+
+		// nullability depends on the underlying swagger type
+		switch result.SwaggerType {
+		case str:
+			result.IsNullable = nullableStrfmt(schema, isRequired)
+		case number, integer:
+			result.IsNullable = nullableNumber(schema, isRequired)
+		default:
+			result.IsNullable = t.isNullable(schema)
+		}
+	}
+
+	guardFormatConflicts(schema.Format, schema)
+	return
+}
+
+// isNullable hints the generator as to render the type with a pointer or not.
+//
+// A schema is deemed nullable (i.e. rendered by a pointer) when:
+// - a custom extension says it has to be so
+// - it is an object with properties
+// - it is a composed object (allOf)
+//
+// The interpretation of Required as a mean to make a type nullable is carried out elsewhere.
+func (t *typeResolver) isNullable(schema *spec.Schema) bool {
+	if forced, found := t.isNullableOverride(schema); found {
+		// the x-nullable / x-isnullable extensions win over structural rules
+		return forced
+	}
+
+	hasProperties := len(schema.Properties) > 0
+	isComposed := len(schema.AllOf) > 0
+
+	return hasProperties || isComposed
+}
+
+// isNullableOverride determines a nullable flag forced by an extension.
+//
+// The second return value reports whether such an override was found:
+// x-isnullable is checked first, then x-nullable.
+func (t *typeResolver) isNullableOverride(schema *spec.Schema) (bool, bool) {
+	for _, extension := range []string{xIsNullable, xNullable} {
+		raw, found := schema.Extensions[extension]
+		if !found {
+			continue
+		}
+		if nullable, isBool := raw.(bool); isBool {
+			return nullable, true
+		}
+	}
+
+	return false, false
+}
+
+// firstType yields the first type declared by a schema, defaulting to "object"
+// when no type is set. Multi-type declarations are not supported: only the
+// first type is retained (with a warning).
+func (t *typeResolver) firstType(schema *spec.Schema) string {
+	switch {
+	case len(schema.Type) == 0, schema.Type[0] == "":
+		return object
+	case len(schema.Type) > 1:
+		// JSON-Schema multiple types, e.g. {"type": [ "object", "array" ]} are not supported.
+		// TODO: should keep the first _supported_ type, e.g. skip null
+		log.Printf("warning: JSON-Schema type definition as array with several types is not supported in %#v. Taking the first type: %s", schema.Type, schema.Type[0])
+	}
+	return schema.Type[0]
+}
+
+// resolveArray resolves a schema of swagger type "array" into a go slice type
+// (or a tuple when Items declares several schemas).
+//
+// Element nullability is decided here, overriding the general rules applied by
+// ResolveSchema: only complex, non-discriminated elements are rendered as
+// pointers, unless an x-nullable extension forces the decision.
+func (t *typeResolver) resolveArray(schema *spec.Schema, isAnonymous, isRequired bool) (result resolvedType, err error) {
+	debugLog("resolving array (anon: %t, req: %t)", isAnonymous, isRequired)
+
+	result.IsArray = true
+	result.IsNullable = false
+
+	if schema.AdditionalItems != nil {
+		result.HasAdditionalItems = (schema.AdditionalItems.Allows || schema.AdditionalItems.Schema != nil)
+	}
+
+	// no Items schema: render as a slice of interface{}
+	if schema.Items == nil {
+		result.GoType = "[]" + iface
+		result.SwaggerType = array
+		result.SwaggerFormat = ""
+		t.inferAliasing(&result, schema, isAnonymous, isRequired)
+
+		return
+	}
+
+	// several Items schemas: this is a tuple, resolved elsewhere
+	if len(schema.Items.Schemas) > 0 {
+		result.IsArray = false
+		result.IsTuple = true
+		result.SwaggerType = array
+		result.SwaggerFormat = ""
+		t.inferAliasing(&result, schema, isAnonymous, isRequired)
+
+		return
+	}
+
+	rt, er := t.ResolveSchema(schema.Items.Schema, true, false)
+	if er != nil {
+		err = er
+		return
+	}
+
+	// Override the general nullability rule from ResolveSchema() in array elements:
+	// - only complex items are nullable (when not discriminated, not forced by x-nullable)
+	// - arrays of allOf have non nullable elements when not forced by x-nullable
+	elem := schema.Items.Schema
+	if elem.Ref.String() != "" {
+		// drill into $ref to figure out whether we want the element type to nullable or not
+		resolved, erf := spec.ResolveRef(t.Doc.Spec(), &elem.Ref)
+		if erf != nil {
+			debugLog("error resolving ref %s: %v", schema.Ref.String(), erf)
+		}
+		elem = resolved
+	}
+
+	debugLogAsJSON("resolved item for %s", rt.GoType, elem)
+	if nullable, ok := t.isNullableOverride(elem); ok {
+		debugLog("found nullable override in element %s: %t", rt.GoType, nullable)
+		rt.IsNullable = nullable
+	} else {
+		// this differs from isNullable for elements with AllOf
+		debugLog("no nullable override in element %s: Properties: %t, HasDiscriminator: %t", rt.GoType, len(elem.Properties) > 0, rt.HasDiscriminator)
+		rt.IsNullable = len(elem.Properties) > 0 && !rt.HasDiscriminator
+	}
+
+	// nullable elements are rendered as pointers, unless already pointers
+	result.GoType = "[]" + rt.GoType
+	if rt.IsNullable && !strings.HasPrefix(rt.GoType, "*") {
+		result.GoType = "[]*" + rt.GoType
+	}
+
+	result.ElemType = &rt
+	result.SwaggerType = array
+	result.SwaggerFormat = ""
+	result.IsEnumCI = hasEnumCI(schema.Extensions)
+	t.inferAliasing(&result, schema, isAnonymous, isRequired)
+	result.Extensions = schema.Extensions
+
+	return
+}
+
+// goTypeName builds the go type name for a definition name, qualifying it with
+// the appropriate package when the definition is known to the resolver.
+func (t *typeResolver) goTypeName(nm string) string {
+	goName := swag.ToGoName(nm)
+
+	// if a definitions package has been defined, already resolved definitions are
+	// always resolved against their original package (e.g. "models"), and not the
+	// current package. This allows complex anonymous extra schemas to reuse known
+	// definitions generated in another package.
+	if len(t.knownDefsKept) > 0 {
+		if _, kept := t.knownDefsKept[nm]; kept {
+			return t.keepDefinitionsPkg + "." + goName
+		}
+	}
+
+	if t.ModelsPackage == "" {
+		return goName
+	}
+	if _, known := t.KnownDefs[nm]; known {
+		return t.ModelsPackage + "." + goName
+	}
+	return goName
+}
+
+// resolveObject resolves a schema of swagger type "object": named or anonymous
+// structs, allOf compositions, maps (via AdditionalProperties), and the
+// degenerate no-property case rendered as interface{}.
+func (t *typeResolver) resolveObject(schema *spec.Schema, isAnonymous bool) (result resolvedType, err error) {
+	debugLog("resolving object %s (anon: %t, req: %t)", t.ModelName, isAnonymous, false)
+
+	result.IsAnonymous = isAnonymous
+
+	// a discriminator makes this a polymorphic base type
+	result.IsBaseType = schema.Discriminator != ""
+	if !isAnonymous {
+		result.SwaggerType = object
+		tpe, pkg, alias := t.knownDefGoType(t.ModelName, *schema, t.goTypeName)
+		result.GoType = tpe
+		result.Pkg = pkg
+		result.PkgAlias = alias
+	}
+	// allOf composition: a complex object, nullable if any member is (unless overridden)
+	if len(schema.AllOf) > 0 {
+		result.GoType = t.goTypeName(t.ModelName)
+		result.IsComplexObject = true
+		var isNullable bool
+		for _, sch := range schema.AllOf {
+			p := sch
+			if t.isNullable(&p) {
+				isNullable = true
+			}
+		}
+		if override, ok := t.isNullableOverride(schema); ok {
+			// prioritize x-nullable extensions
+			result.IsNullable = override
+		} else {
+			result.IsNullable = isNullable
+		}
+		result.SwaggerType = object
+		return
+	}
+
+	// if this schema has properties, build a map of property name to
+	// resolved type, this should also flag the object as anonymous,
+	// when a ref is found, the anonymous flag will be reset
+	if len(schema.Properties) > 0 {
+		result.IsNullable = t.isNullable(schema)
+		result.IsComplexObject = true
+		// no return here, still need to check for additional properties
+	}
+
+	// account for additional properties
+	if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
+		sch := schema.AdditionalProperties.Schema
+		et, er := t.ResolveSchema(sch, sch.Ref.String() == "", false)
+		if er != nil {
+			err = er
+			return
+		}
+
+		// a pure map only when there are no declared properties
+		result.IsMap = !result.IsComplexObject
+
+		result.SwaggerType = object
+
+		if et.IsExternal {
+			// external AdditionalProperties are a special case because we look ahead into schemas
+			extType, _, _ := t.knownDefGoType(t.ModelName, *sch, t.goTypeName)
+			et.GoType = extType
+		}
+
+		// only complex map elements are nullable (when not forced by x-nullable)
+		// TODO: figure out if required to check when not discriminated like arrays?
+		et.IsNullable = !et.IsArray && t.isNullable(schema.AdditionalProperties.Schema)
+		if et.IsNullable {
+			result.GoType = "map[string]*" + et.GoType
+		} else {
+			result.GoType = "map[string]" + et.GoType
+		}
+
+		// Resolving nullability conflicts for:
+		// - map[][]...[]{items}
+		// - map[]{aliased type}
+		//
+		// when IsMap is true and the type is a distinct definition,
+		// aliased type or anonymous construct generated independently.
+		//
+		// IsMapNullOverride is to be handled by the generator for special cases
+		// where the map element is considered non nullable and the element itself is.
+		//
+		// This allows to appreciate nullability according to the context
+		needsOverride := result.IsMap && (et.IsArray || (sch.Ref.String() != "" || et.IsAliased || et.IsAnonymous))
+
+		if needsOverride {
+			var er error
+			if et.IsArray {
+				var it resolvedType
+				s := sch
+				// resolve the last items after nested arrays
+				for s.Items != nil && s.Items.Schema != nil {
+					it, er = t.ResolveSchema(s.Items.Schema, sch.Ref.String() == "", false)
+					if er != nil {
+						// NOTE(review): er is not propagated to the named return err here,
+						// so this error is silently dropped — confirm this is intended.
+						return
+					}
+					s = s.Items.Schema
+				}
+				// mark an override when nullable status conflicts, i.e. when the original type is not already nullable
+				if !it.IsAnonymous || it.IsAnonymous && it.IsNullable {
+					result.IsMapNullOverride = true
+				}
+			} else {
+				// this locks the generator on the local nullability status
+				result.IsMapNullOverride = true
+			}
+		}
+
+		t.inferAliasing(&result, schema, isAnonymous, false)
+		result.ElemType = &et
+		return
+	}
+
+	// properties with no additional properties: nothing more to resolve
+	if len(schema.Properties) > 0 {
+		return
+	}
+
+	// an object without property and without AdditionalProperties schema is rendered as interface{}
+	result.IsMap = true
+	result.SwaggerType = object
+	result.IsNullable = false
+	// an object without properties but with MinProperties or MaxProperties is rendered as map[string]interface{}
+	result.IsInterface = len(schema.Properties) == 0 && !schema.Validations().HasObjectValidations()
+	if result.IsInterface {
+		result.GoType = iface
+	} else {
+		result.GoType = "map[string]interface{}"
+	}
+	return
+}
+
+// nullableBool makes a boolean a pointer when we want to distinguish the zero value from no value set.
+// This is the case when:
+// - a x-nullable extension says so in the spec
+// - it is **not** a read-only property
+// - it is a required property
+// - it has a default value
+func nullableBool(schema *spec.Schema, isRequired bool) bool {
+	if nullable := nullableExtension(schema.Extensions); nullable != nil {
+		return *nullable
+	}
+
+	hasDefaultOrReadOnly := schema.Default != nil || schema.ReadOnly
+	if isRequired {
+		// required booleans are pointers unless a default or read-only status
+		// already distinguishes the unset case
+		return !hasDefaultOrReadOnly
+	}
+	// optional booleans are pointers only when a default or read-only status
+	// makes the zero value ambiguous
+	return hasDefaultOrReadOnly
+}
+
+// nullableNumber makes a number a pointer when we want to distinguish the zero value from no value set.
+// This is the case when:
+// - a x-nullable extension says so in the spec
+// - it is **not** a read-only property
+// - it is a required property
+// - boundaries defines the zero value as a valid value:
+//   - there is a non-exclusive boundary set at the zero value of the type
+//   - the [min,max] range crosses the zero value of the type
+func nullableNumber(schema *spec.Schema, isRequired bool) bool {
+	if nullable := nullableExtension(schema.Extensions); nullable != nil {
+		return *nullable
+	}
+	hasDefault := schema.Default != nil && !swag.IsZero(schema.Default)
+
+	// isMin/isMax: a boundary excludes zero; bcMin/bcMax: a boundary sits exactly at zero
+	isMin := schema.Minimum != nil && (*schema.Minimum != 0 || schema.ExclusiveMinimum)
+	bcMin := schema.Minimum != nil && *schema.Minimum == 0 && !schema.ExclusiveMinimum
+	// NOTE(review): isMax is only considered when no Minimum is set — presumably
+	// because the isMinMax/bcMinMax terms below cover the two-sided case; confirm
+	// this asymmetry is intended.
+	isMax := schema.Minimum == nil && (schema.Maximum != nil && (*schema.Maximum != 0 || schema.ExclusiveMaximum))
+	bcMax := schema.Maximum != nil && *schema.Maximum == 0 && !schema.ExclusiveMaximum
+	isMinMax := (schema.Minimum != nil && schema.Maximum != nil && *schema.Minimum < *schema.Maximum)
+	bcMinMax := (schema.Minimum != nil && schema.Maximum != nil && (*schema.Minimum < 0 && 0 < *schema.Maximum))
+
+	nullable := !schema.ReadOnly && (isRequired || (hasDefault && !(isMin || isMax || isMinMax)) || bcMin || bcMax || bcMinMax)
+	return nullable
+}
+
+// nullableString makes a string nullable when we want to distinguish the zero value from no value set.
+// This is the case when:
+// - a x-nullable extension says so in the spec
+// - it is **not** a read-only property
+// - it is a required property
+// - it has a MinLength property set to 0
+// - it has a default other than "" (the zero for strings) and no MinLength or zero MinLength
+func nullableString(schema *spec.Schema, isRequired bool) bool {
+	if nullable := nullableExtension(schema.Extensions); nullable != nil {
+		return *nullable
+	}
+	if schema.ReadOnly {
+		// read-only strings are never rendered as pointers
+		return false
+	}
+	if isRequired {
+		return true
+	}
+	if schema.MinLength != nil {
+		// a zero MinLength makes "" a valid value, so the zero value must be
+		// distinguishable from "unset"; a non-zero MinLength excludes ""
+		return *schema.MinLength == 0
+	}
+	// with no MinLength, a non-zero default requires a pointer to tell "" from unset
+	return schema.Default != nil && !swag.IsZero(schema.Default)
+}
+
+// nullableStrfmt decides nullability for string-format (strfmt) values.
+// The binary format (io.Reader) is never rendered as a pointer.
+func nullableStrfmt(schema *spec.Schema, isRequired bool) bool {
+	if schema.Format == binary {
+		return false
+	}
+
+	if nullable := nullableExtension(schema.Extensions); nullable != nil {
+		return *nullable
+	}
+
+	hasNonZeroDefault := schema.Default != nil && !swag.IsZero(schema.Default)
+	return !schema.ReadOnly && (isRequired || hasNonZeroDefault)
+}
+
+// nullableExtension reports the nullability forced by the x-nullable or
+// x-isnullable extension, or nil when neither is set.
+// x-nullable takes precedence over the legacy x-isnullable.
+func nullableExtension(ext spec.Extensions) *bool {
+	if ext == nil {
+		return nil
+	}
+
+	if nullable := boolExtension(ext, xNullable); nullable != nil {
+		return nullable
+	}
+	return boolExtension(ext, xIsNullable)
+}
+
+// boolExtension extracts a boolean extension value, or nil when the key is
+// absent or its value is not a boolean.
+func boolExtension(ext spec.Extensions, key string) *bool {
+	v, found := ext[key]
+	if !found {
+		return nil
+	}
+	b, isBool := v.(bool)
+	if !isBool {
+		return nil
+	}
+	return &b
+}
+
+// hasEnumCI tells whether the x-go-enum-ci extension requests a
+// case-insensitive enum. All enumeration types are case-sensitive by default.
+func hasEnumCI(ve spec.Extensions) bool {
+	if v, found := ve[xGoEnumCI]; found {
+		isEnumCI, isBool := v.(bool)
+		return isBool && isEnumCI
+	}
+	return false
+}
+
+// shortCircuitResolveExternal resolves an external type directly from its
+// x-go-type hints, bypassing schema-based resolution. It is used when the
+// extension provides a "kind" hint and the type is not embedded.
+func (t *typeResolver) shortCircuitResolveExternal(tpe, pkg, alias string, extType *externalTypeDefinition, schema *spec.Schema, isRequired bool) resolvedType {
+	// short circuit type resolution for external types
+	debugLogAsJSON("shortCircuitResolveExternal", extType)
+
+	var result resolvedType
+	result.Extensions = schema.Extensions
+	result.GoType = tpe
+	result.Pkg = pkg
+	result.PkgAlias = alias
+	result.IsInterface = false
+	// by default consider that we have a type with validations. Use hint "interface" or "noValidation" to disable validations
+	result.SkipExternalValidation = swag.BoolValue(extType.Hints.NoValidation)
+	result.IsNullable = isRequired
+
+	// apply the "kind" hint (map/array/object/interface/stream/tuple/primitive)
+	result.setKind(extType.Hints.Kind)
+	if result.IsInterface || result.IsStream {
+		result.IsNullable = false
+	}
+	if extType.Hints.Nullable != nil {
+		result.IsNullable = swag.BoolValue(extType.Hints.Nullable)
+	}
+
+	if nullable, ok := t.isNullableOverride(schema); ok {
+		result.IsNullable = nullable // x-nullable directive rules them all
+	}
+
+	// other extensions
+	if result.IsArray {
+		result.IsEmptyOmitted = false
+		tpe = "array"
+	}
+
+	result.setExtensions(schema, tpe)
+	return result
+}
+
+// ResolveSchema resolves a swagger schema into a resolvedType usable by templates.
+//
+// It dispatches on the schema's declared type after handling, in order:
+// nil schemas (interface{}), x-go-type external types, $refs, the "file"
+// type, and formats. Extensions are propagated on the way out via a deferred
+// setExtensions call, and external-type decisions are enforced by a deferred
+// closure mutating the named return.
+func (t *typeResolver) ResolveSchema(schema *spec.Schema, isAnonymous, isRequired bool) (result resolvedType, err error) {
+	debugLog("resolving schema (anon: %t, req: %t) %s", isAnonymous, isRequired, t.ModelName)
+	defer func() {
+		debugLog("returning after resolve schema: %s", pretty.Sprint(result))
+	}()
+
+	// a nil schema resolves to interface{}
+	if schema == nil {
+		result.IsInterface = true
+		result.GoType = iface
+		return
+	}
+
+	extType, isExternalType := t.resolveExternalType(schema.Extensions)
+	if isExternalType {
+		tpe, pkg, alias := t.knownDefGoType(t.ModelName, *schema, t.goTypeName)
+		debugLog("found type %s declared as external, imported from %s as %s. Has type hints? %t, rendered has embedded? %t",
+			t.ModelName, pkg, tpe, extType.Hints.Kind != "", extType.Embedded)
+
+		if extType.Hints.Kind != "" && !extType.Embedded {
+			// use hint to qualify type
+			debugLog("short circuits external type resolution with hint for %s", tpe)
+			result = t.shortCircuitResolveExternal(tpe, pkg, alias, extType, schema, isRequired)
+			result.IsExternal = isAnonymous // mark anonymous external types only, not definitions
+			return
+		}
+
+		// use spec to qualify type
+		debugLog("marking type %s as external embedded: %t", tpe, extType.Embedded)
+		defer func() { // enforce bubbling up decisions taken about being an external type
+			// mark this type as an embedded external definition if requested
+			result.IsEmbedded = extType.Embedded
+			result.IsExternal = isAnonymous // for non-embedded, mark anonymous external types only, not definitions
+
+			result.IsAnonymous = false
+			result.IsAliased = true
+			result.IsNullable = isRequired
+			if extType.Hints.Nullable != nil {
+				result.IsNullable = swag.BoolValue(extType.Hints.Nullable)
+			}
+
+			result.IsMap = false
+			result.AliasedType = result.GoType
+			result.IsInterface = false
+
+			if result.IsEmbedded {
+				// embedded external types carry the external reference as their ElemType
+				result.ElemType = &resolvedType{
+					IsExternal:             isAnonymous, // mark anonymous external types only, not definitions
+					IsInterface:            false,
+					Pkg:                    extType.Import.Package,
+					PkgAlias:               extType.Import.Alias,
+					SkipExternalValidation: swag.BoolValue(extType.Hints.NoValidation),
+				}
+				if extType.Import.Alias != "" {
+					result.ElemType.GoType = extType.Import.Alias + "." + extType.Type
+				} else {
+					result.ElemType.GoType = extType.Type
+				}
+				result.ElemType.setKind(extType.Hints.Kind)
+				if result.IsInterface || result.IsStream {
+					result.ElemType.IsNullable = false
+				}
+				if extType.Hints.Nullable != nil {
+					result.ElemType.IsNullable = swag.BoolValue(extType.Hints.Nullable)
+				}
+				// embedded external: by default consider validation is skipped for the external type
+				//
+				// NOTE: at this moment the template generates a type assertion, so this setting does not really matter
+				// for embedded types.
+				if extType.Hints.NoValidation != nil {
+					result.ElemType.SkipExternalValidation = swag.BoolValue(extType.Hints.NoValidation)
+				} else {
+					result.ElemType.SkipExternalValidation = true
+				}
+			} else {
+				// non-embedded external type: by default consider that validation is enabled (SkipExternalValidation: false)
+				result.SkipExternalValidation = swag.BoolValue(extType.Hints.NoValidation)
+			}
+
+			if nullable, ok := t.isNullableOverride(schema); ok {
+				result.IsNullable = nullable
+			}
+		}()
+	}
+
+	tpe := t.firstType(schema)
+	var returns bool
+
+	// strip validations that do not apply to the declared type
+	guardValidations(tpe, schema, schema.Type...)
+
+	returns, result, err = t.resolveSchemaRef(schema, isRequired)
+
+	if returns {
+		if !isAnonymous {
+			result.IsMap = false
+			result.IsComplexObject = true
+		}
+
+		return
+	}
+
+	defer func() {
+		result.setExtensions(schema, tpe)
+	}()
+
+	// special case of swagger type "file", rendered as io.ReadCloser interface
+	if t.firstType(schema) == file {
+		result.SwaggerType = file
+		result.IsPrimitive = true
+		result.IsNullable = false
+		result.GoType = formatMapping[str][binary]
+		result.IsStream = true
+		return
+	}
+
+	returns, result, err = t.resolveFormat(schema, isAnonymous, isRequired)
+	if returns {
+		return
+	}
+
+	result.IsNullable = t.isNullable(schema) || isRequired
+
+	// dispatch on the declared swagger type
+	switch tpe {
+	case array:
+		result, err = t.resolveArray(schema, isAnonymous, false)
+
+	case file, number, integer, boolean:
+		result.Extensions = schema.Extensions
+		result.GoType = typeMapping[tpe]
+		result.SwaggerType = tpe
+		t.inferAliasing(&result, schema, isAnonymous, isRequired)
+
+		switch tpe {
+		case boolean:
+			result.IsPrimitive = true
+			result.IsCustomFormatter = false
+			result.IsNullable = nullableBool(schema, isRequired)
+		case number, integer:
+			result.IsPrimitive = true
+			result.IsCustomFormatter = false
+			result.IsNullable = nullableNumber(schema, isRequired)
+		case file:
+		}
+
+	case str:
+		result.GoType = str
+		result.SwaggerType = str
+		t.inferAliasing(&result, schema, isAnonymous, isRequired)
+
+		result.IsPrimitive = true
+		result.IsNullable = nullableString(schema, isRequired)
+		result.Extensions = schema.Extensions
+
+	case object:
+		result, err = t.resolveObject(schema, isAnonymous)
+		if err != nil {
+			result = resolvedType{}
+			break
+		}
+		result.HasDiscriminator = schema.Discriminator != ""
+
+	case "null":
+		if schema.Validations().HasObjectValidations() {
+			// no explicit object type, but inferred from object validations:
+			// this makes the type a map[string]interface{} instead of interface{}
+			result, err = t.resolveObject(schema, isAnonymous)
+			if err != nil {
+				result = resolvedType{}
+				break
+			}
+			result.HasDiscriminator = schema.Discriminator != ""
+			break
+		}
+
+		result.GoType = iface
+		result.SwaggerType = object
+		result.IsNullable = false
+		result.IsInterface = true
+
+	default:
+		err = fmt.Errorf("unresolvable: %v (format %q)", schema.Type, schema.Format)
+	}
+
+	return
+}
+
+// warnSkipValidation returns a callback which logs a warning whenever a
+// validation is dropped because it does not apply to the given type(s).
+func warnSkipValidation(types interface{}) func(string, interface{}) {
+	return func(validation string, value interface{}) {
+		// dereference pointer values so the logged value is readable
+		dereferenced := reflect.Indirect(reflect.ValueOf(value)).Interface()
+		log.Printf("warning: validation %s (value: %v) not compatible with type %v. Skipped", validation, dereferenced, types)
+	}
+}
+
+// guardValidations removes (with a warning) validations that don't fit with the schema type.
+//
+// tpe is the effective type used for dispatch; types (defaulting to tpe) is
+// only used to label the warnings. The possibly-modified validations are
+// always written back to the schema on return.
+//
+// Notice that the "enum" validation is allowed on any type but file.
+func guardValidations(tpe string, schema interface {
+	Validations() spec.SchemaValidations
+	SetValidations(spec.SchemaValidations)
+}, types ...string) {
+
+	v := schema.Validations()
+	if len(types) == 0 {
+		types = []string{tpe}
+	}
+	defer func() {
+		schema.SetValidations(v)
+	}()
+
+	if tpe != array {
+		v.ClearArrayValidations(warnSkipValidation(types))
+	}
+
+	// string validations (MinLength/MaxLength/Pattern) also apply to file
+	if tpe != str && tpe != file {
+		v.ClearStringValidations(warnSkipValidation(types))
+	}
+
+	if tpe != object {
+		v.ClearObjectValidations(warnSkipValidation(types))
+	}
+
+	if tpe != number && tpe != integer {
+		v.ClearNumberValidations(warnSkipValidation(types))
+	}
+
+	if tpe == file {
+		// keep MinLength/MaxLength on file
+		if v.Pattern != "" {
+			warnSkipValidation(types)("pattern", v.Pattern)
+			v.Pattern = ""
+		}
+		if v.HasEnum() {
+			warnSkipValidation(types)("enum", v.Enum)
+			v.Enum = nil
+		}
+	}
+
+	// other cases: mapped as interface{}: no validations allowed but Enum
+}
+
+// guardFormatConflicts handles all conflicting properties
+// (for schema model or simple schema) when a format is set.
+//
+// At this moment, validation guards already handle all known conflicts, but for the
+// special case of binary (i.e. io.Reader).
+func guardFormatConflicts(format string, schema interface {
+	Validations() spec.SchemaValidations
+	SetValidations(spec.SchemaValidations)
+}) {
+	v := schema.Validations()
+	msg := fmt.Sprintf("for format %q", format)
+
+	// for this format, no additional validations are supported
+	if format == "binary" {
+		// no validations supported on binary fields at this moment (io.Reader)
+		v.ClearStringValidations(warnSkipValidation(msg))
+		if v.HasEnum() {
+			// BUG FIX: the warning closure was previously built but never invoked,
+			// so dropping the enum validation happened silently.
+			warnSkipValidation(msg)("enum", v.Enum)
+			v.Enum = nil
+		}
+		schema.SetValidations(v)
+	}
+	// more cases should be inserted here if they arise
+}
+
+// resolvedType is a swagger type that has been resolved and analyzed for usage
+// in a template
+type resolvedType struct {
+	// kind flags: at most one of these shapes applies at a time (see setKind)
+	IsAnonymous       bool
+	IsArray           bool
+	IsMap             bool
+	IsInterface       bool
+	IsPrimitive       bool
+	IsCustomFormatter bool
+	IsAliased         bool
+	IsNullable        bool // rendered with a pointer
+	IsStream          bool
+	IsEmptyOmitted    bool // serialization omits empty values (x-omitempty)
+	IsJSONString      bool // (de)serialized from/to a JSON string (x-go-json-string)
+	IsEnumCI          bool // case-insensitive enum (x-go-enum-ci)
+	IsBase64          bool
+	IsExternal        bool // anonymous type declared external via x-go-type
+
+	// A tuple gets rendered as an anonymous struct with P{index} as property name
+	IsTuple            bool
+	HasAdditionalItems bool
+
+	// A complex object gets rendered as a struct
+	IsComplexObject bool
+
+	// A polymorphic type
+	IsBaseType       bool
+	HasDiscriminator bool
+
+	GoType        string
+	Pkg           string
+	PkgAlias      string
+	AliasedType   string
+	SwaggerType   string
+	SwaggerFormat string
+	Extensions    spec.Extensions
+
+	// The type of the element in a slice or map
+	ElemType *resolvedType
+
+	// IsMapNullOverride indicates that a nullable object is used within an
+	// aliased map. In this case, the reference is not rendered with a pointer
+	IsMapNullOverride bool
+
+	// IsSuperAlias indicates that the aliased type is really the same type,
+	// e.g. in golang, this translates to: type A = B
+	IsSuperAlias bool
+
+	// IsEmbedded applies to externally defined types. When embedded, a type
+	// is generated in models that embeds the external type, with the Validate
+	// method.
+	IsEmbedded bool
+
+	SkipExternalValidation bool
+}
+
+// Zero returns a go expression that initializes a value of this type to its
+// zero, or an empty string when no initializer is known.
+func (rt resolvedType) Zero() string {
+	// an aliased type converts the zero of the aliased type
+	if rt.IsAliased {
+		if zr, ok := zeroes[rt.AliasedType]; ok {
+			return rt.GoType + "(" + zr + ")"
+		}
+	}
+
+	// zero expression provided natively or by a strfmt function
+	if zr, ok := zeroes[rt.GoType]; ok {
+		return zr
+	}
+
+	switch {
+	case rt.IsMap:
+		// map initializer
+		return "make(" + rt.GoType + ", 50)"
+	case rt.IsArray:
+		// slice initializer
+		return "make(" + rt.GoType + ", 0, 50)"
+	case rt.IsTuple, rt.IsComplexObject:
+		// object initializer: pointer types get new(), values a composite literal
+		if rt.IsNullable {
+			return "new(" + rt.GoType + ")"
+		}
+		return rt.GoType + "{}"
+	case rt.IsInterface:
+		// interface initializer
+		return "nil"
+	}
+
+	return ""
+}
+
+// ToString returns a string conversion for a type akin to a string
+func (rt resolvedType) ToString(value string) string {
+ if !rt.IsPrimitive || rt.SwaggerType != "string" || rt.IsStream {
+ return ""
+ }
+ if rt.IsCustomFormatter {
+ if rt.IsAliased {
+ return fmt.Sprintf("%s(%s).String()", rt.AliasedType, value)
+ }
+ return fmt.Sprintf("%s.String()", value)
+ }
+ var deref string
+ if rt.IsNullable {
+ deref = "*"
+ }
+ if rt.GoType == "string" || rt.GoType == "*string" {
+ return fmt.Sprintf("%s%s", deref, value)
+ }
+
+ return fmt.Sprintf("string(%s%s)", deref, value)
+}
+
+// setExtensions propagates schema extensions onto the resolved type:
+// case-insensitive enums, empty-omission, JSON-string handling and custom tags.
+func (rt *resolvedType) setExtensions(schema *spec.Schema, origType string) {
+	rt.IsEnumCI = hasEnumCI(schema.Extensions)
+	rt.setIsEmptyOmitted(schema, origType)
+	rt.setIsJSONString(schema, origType)
+
+	customTag, found := schema.Extensions[xGoCustomTag]
+	if !found {
+		return
+	}
+	if rt.Extensions == nil {
+		rt.Extensions = make(spec.Extensions)
+	}
+	rt.Extensions[xGoCustomTag] = customTag
+}
+
+// setIsEmptyOmitted decides whether empty values are omitted on serialization,
+// honoring the x-omitempty extension when present.
+func (rt *resolvedType) setIsEmptyOmitted(schema *spec.Schema, tpe string) {
+	if raw, found := schema.Extensions[xOmitEmpty]; found {
+		omitted, isBool := raw.(bool)
+		rt.IsEmptyOmitted = isBool && omitted
+		return
+	}
+	// array of primitives are by default not empty-omitted, but arrays of aliased type are
+	rt.IsEmptyOmitted = tpe != array || rt.IsAliased
+}
+
+// setIsJSONString flags types that the x-go-json-string extension marks as
+// (de)serialized from/to JSON strings. The extension's presence alone decides.
+func (rt *resolvedType) setIsJSONString(schema *spec.Schema, _ string) {
+	_, rt.IsJSONString = schema.Extensions[xGoJSONString]
+}
+
+// setKind overrides the resolved type's shape flags according to an x-go-type
+// "kind" hint. An empty kind leaves the type untouched; an unknown kind is
+// reported and skipped.
+func (rt *resolvedType) setKind(kind string) {
+	if kind != "" {
+		debugLog("overriding kind for %s as %s", rt.GoType, kind)
+	}
+
+	// reset clears every shape flag before the hinted one is set
+	reset := func() {
+		rt.IsMap = false
+		rt.IsArray = false
+		rt.IsComplexObject = false
+		rt.IsInterface = false
+		rt.IsStream = false
+		rt.IsTuple = false
+		rt.IsPrimitive = false
+	}
+
+	switch kind {
+	case "map":
+		reset()
+		rt.IsMap = true
+		rt.SwaggerType = object
+	case "array":
+		reset()
+		rt.IsArray = true
+		rt.SwaggerType = array
+	case "object":
+		reset()
+		rt.IsComplexObject = true
+		rt.SwaggerType = object
+	case "interface", "null":
+		reset()
+		rt.IsInterface = true
+		rt.SwaggerType = iface
+	case "stream":
+		reset()
+		rt.IsStream = true
+		rt.SwaggerType = file
+	case "tuple":
+		reset()
+		rt.IsTuple = true
+		rt.SwaggerType = array
+	case "primitive":
+		// NB: "primitive" does not override SwaggerType
+		reset()
+		rt.IsPrimitive = true
+	case "":
+		// no hint: leave flags untouched
+	default:
+		log.Printf("warning: unsupported hint value for external type: %q. Skipped", kind)
+	}
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/README.md b/vendor/github.com/go-swagger/go-swagger/scan/README.md
new file mode 100644
index 000000000..1ae6f766f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/README.md
@@ -0,0 +1,3 @@
+# scan
+
+Pre go1.11 version of the go source parser, without support for go modules.
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/classifier.go b/vendor/github.com/go-swagger/go-swagger/scan/classifier.go
new file mode 100644
index 000000000..e674272d0
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/classifier.go
@@ -0,0 +1,166 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "fmt"
+ "go/ast"
+ "log"
+ "regexp"
+
+ "golang.org/x/tools/go/loader"
+)
+
// packageFilter holds a regular expression used to select package paths.
type packageFilter struct {
	Name string
}

// Matches reports whether path is matched by the filter's pattern.
// An invalid pattern aborts the program.
func (pf *packageFilter) Matches(path string) bool {
	re, err := regexp.Compile(pf.Name)
	if err != nil {
		log.Fatal(err)
	}
	return re.MatchString(path)
}

// packageFilters is a set of filters; a path is accepted as soon as any
// single filter accepts it.
type packageFilters []packageFilter

// HasFilters reports whether at least one filter is configured.
func (pf packageFilters) HasFilters() bool {
	return len(pf) != 0
}

// Matches reports whether any filter in the set matches path.
func (pf packageFilters) Matches(path string) bool {
	for i := range pf {
		if pf[i].Matches(path) {
			return true
		}
	}
	return false
}
+
// classifiedProgram groups the AST files of a program by the kind of
// swagger annotation found in their comments. A single file may appear
// in several buckets when it carries several annotation kinds.
type classifiedProgram struct {
	Meta       []*ast.File // files carrying swagger:meta
	Models     []*ast.File // files carrying swagger:model
	Routes     []*ast.File // files carrying swagger:route
	Operations []*ast.File // files carrying swagger:operation
	Parameters []*ast.File // files carrying swagger:parameters
	Responses  []*ast.File // files carrying swagger:response
}
+
// programClassifier classifies the files of a program into buckets
// for processing by a swagger spec generator. This buckets files in
// 3 groups: Meta, Models and Operations.
//
// Each of these buckets is then processed with an appropriate parsing strategy.
//
// When Include or Exclude filters are provided they are used to limit the
// candidates prior to parsing.
// The include filters take precedence over the excludes. So when something appears
// in both filters it will be included.
type programClassifier struct {
	Includes packageFilters // when non-empty, only matching packages are scanned
	Excludes packageFilters // rejects matching packages; ignored when Includes is set
}
+
// Classify walks every package of prog (subject to the Includes/Excludes
// filters, with Includes taking precedence) and sorts its files into
// annotation buckets by matching each comment line against
// rxSwaggerAnnotation.
//
// It returns an error when a comment group mixes incompatible struct-level
// annotations (model/parameters/response) or contains an unknown
// swagger annotation.
func (pc *programClassifier) Classify(prog *loader.Program) (*classifiedProgram, error) {
	var cp classifiedProgram
	for pkg, pkgInfo := range prog.AllPackages {
		if Debug {
			log.Printf("analyzing: %s\n", pkg.Path())
		}
		// include filters win: when present, the exclude list is never consulted
		if pc.Includes.HasFilters() {
			if !pc.Includes.Matches(pkg.Path()) {
				continue
			}
		} else if pc.Excludes.HasFilters() {
			if pc.Excludes.Matches(pkg.Path()) {
				continue
			}
		}

		for _, file := range pkgInfo.Files {
			var ro, op, mt, pm, rs, mm bool // only add a particular file once
			for _, comments := range file.Comments {
				// seenStruct remembers the first struct-level annotation in this
				// comment group so conflicting ones can be rejected
				var seenStruct string
				for _, cline := range comments.List {
					if cline != nil {
						matches := rxSwaggerAnnotation.FindStringSubmatch(cline.Text)
						if len(matches) > 1 {
							switch matches[1] {
							case "route":
								if !ro {
									cp.Routes = append(cp.Routes, file)
									ro = true
								}
							case "operation":
								if !op {
									cp.Operations = append(cp.Operations, file)
									op = true
								}
							case "model":
								if !mm {
									cp.Models = append(cp.Models, file)
									mm = true
								}
								if seenStruct == "" || seenStruct == matches[1] {
									seenStruct = matches[1]
								} else {
									return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text)
								}
							case "meta":
								if !mt {
									cp.Meta = append(cp.Meta, file)
									mt = true
								}
							case "parameters":
								if !pm {
									cp.Parameters = append(cp.Parameters, file)
									pm = true
								}
								if seenStruct == "" || seenStruct == matches[1] {
									seenStruct = matches[1]
								} else {
									return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text)
								}
							case "response":
								if !rs {
									cp.Responses = append(cp.Responses, file)
									rs = true
								}
								if seenStruct == "" || seenStruct == matches[1] {
									seenStruct = matches[1]
								} else {
									return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text)
								}
							case "strfmt", "name", "discriminated", "file", "enum", "default", "alias", "type":
								// TODO: perhaps collect these and pass along to avoid lookups later on
							case "allOf":
								// secondary annotations: recognized but not bucketed here
							case "ignore":
							default:
								return nil, fmt.Errorf("classifier: unknown swagger annotation %q", matches[1])
							}
						}

					}
				}
			}
		}
	}

	return &cp, nil
}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/doc.go b/vendor/github.com/go-swagger/go-swagger/scan/doc.go
new file mode 100644
index 000000000..2bc415a8f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/doc.go
@@ -0,0 +1,89 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package scan provides a scanner for go files that produces a swagger spec document.
+
+This package is intended for pre-go1.11 versions, and does not support go modules.
+
+You give it a main file and it will parse all the files that are required by that main
+package to produce a swagger specification.
+
+To use you can add a go:generate comment to your main file for example:
+
+ //go:generate swagger generate spec
+
+The following annotations exist:
+
+swagger:meta
+
+The swagger:meta annotation flags a file as source for metadata about the API.
+This is typically a doc.go file with your package documentation.
+
+You can specify a Consumes and Produces key which has a new content type on each line
+Schemes is a tag that is required and allows for a comma separated string composed of:
+http, https, ws or wss
+
+Host and BasePath can be specified but those values will be defaults,
+they should get substituted when serving the swagger spec.
+
+Default parameters and responses are not supported at this stage, for those you can edit the template json.
+
+swagger:strfmt [name]
+
+A swagger:strfmt annotation names a type as a string formatter. The name is mandatory and that is
+what will be used as format name for this particular string format.
+String formats should only be used for very well known formats.
+
+swagger:model [?model name]
+
+A swagger:model annotation optionally gets a model name as extra data on the line.
+When this appears anywhere in a comment for a struct, then that struct becomes a schema
+in the definitions object of swagger.
+
+The struct gets analyzed and all the collected models are added to the tree.
+The refs are tracked separately so that they can be renamed later on.
+
+When this annotation is found to be on an interface instead of a struct, the properties are provided
+through exported nullary methods.
+
+A property of an interface model can have a Discriminator: true annotation to mark that field as
+the field that will contain the discriminator value.
+
+swagger:route [method] [path pattern] [operation id] [?tag1 tag2 tag3]
+
+A swagger:route annotation links a path to a method.
+This operation gets a unique id, which is used in various places as method name.
+One such usage is in method names for client generation for example.
+
+Because there are many routers available, this tool does not try to parse the paths
+you provided to your routing library of choice. So you have to specify your path pattern
+yourself in valid swagger syntax.
+
+swagger:params [operationid1 operationid2]
+
+Links a struct to one or more operations. The params in the resulting swagger spec can be composed of several structs.
+There are no guarantees given on how property name overlaps are resolved when several structs apply to the same operation.
+This tag works very similarly to the swagger:model tag except that it produces valid parameter objects instead of schema
+objects.
+
+swagger:response [?response name]
+
+Reads a struct decorated with swagger:response and uses that information to fill up the headers and the schema for a response.
+A swagger:route can specify a response name for a status code and then the matching response will be used for that operation in the swagger definition.
+*/
+package scan
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/enum.go b/vendor/github.com/go-swagger/go-swagger/scan/enum.go
new file mode 100644
index 000000000..d1ecc9c87
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/enum.go
@@ -0,0 +1,84 @@
+//go:build !go1.11
+// +build !go1.11
+
+package scan
+
+import (
+ "go/ast"
+ "strconv"
+ "strings"
+ "unicode"
+)
+
// upperSnakeCase converts a mixed-case identifier such as "SomeTypeName"
// into its upper snake-case form "SOME_TYPE_NAME". Runs of upper-case
// letters (e.g. "HTTP") are kept together as a single word.
func upperSnakeCase(s string) string {
	runes := []rune(s)
	lowerAt := func(idx int) bool {
		return idx >= 0 && idx < len(runes) && unicode.IsLower(runes[idx])
	}

	var out []rune
	for i, r := range runes {
		if unicode.IsUpper(r) {
			// insert a separator at a case boundary, unless one is already there
			if i > 0 && runes[i-1] != '_' && (lowerAt(i-1) || lowerAt(i+1)) {
				out = append(out, '_')
			}
			r = unicode.ToLower(r)
		}
		out = append(out, r)
	}

	return strings.ToUpper(string(out))
}
+
+func getEnumBasicLitValue(basicLit *ast.BasicLit) interface{} {
+ switch basicLit.Kind.String() {
+ case "INT":
+ if result, err := strconv.ParseInt(basicLit.Value, 10, 64); err == nil {
+ return result
+ }
+ case "FLOAT":
+ if result, err := strconv.ParseFloat(basicLit.Value, 64); err == nil {
+ return result
+ }
+ default:
+ return strings.Trim(basicLit.Value, "\"")
+ }
+ return nil
+}
+
+func getEnumValues(file *ast.File, typeName string) (list []interface{}) {
+ for _, decl := range file.Decls {
+ genDecl, ok := decl.(*ast.GenDecl)
+
+ if !ok {
+ continue
+ }
+
+ if genDecl.Tok.String() == "const" {
+ for _, spec := range genDecl.Specs {
+ if valueSpec, ok := spec.(*ast.ValueSpec); ok {
+ switch valueSpec.Type.(type) {
+ case *ast.Ident:
+ if valueSpec.Type.(*ast.Ident).Name == typeName {
+ if basicLit, ok := valueSpec.Values[0].(*ast.BasicLit); ok {
+ list = append(list, getEnumBasicLitValue(basicLit))
+ }
+ }
+ default:
+ var name = valueSpec.Names[0].Name
+ if strings.HasPrefix(name, upperSnakeCase(typeName)) {
+ var values = strings.SplitN(name, "__", 2)
+ if len(values) == 2 {
+ list = append(list, values[1])
+ }
+ }
+ }
+
+ }
+
+ }
+ }
+ }
+ return
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/meta.go b/vendor/github.com/go-swagger/go-swagger/scan/meta.go
new file mode 100644
index 000000000..f5b5ed5dd
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/meta.go
@@ -0,0 +1,246 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/mail"
+ "regexp"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
// metaTOSSetter returns a setter that stores the terms-of-service text
// (tag lines joined via joinDropLast) on the spec info object.
func metaTOSSetter(meta *spec.Info) func([]string) {
	return func(lines []string) {
		meta.TermsOfService = joinDropLast(lines)
	}
}

// metaConsumesSetter returns a setter for the spec's global consumes mime types.
func metaConsumesSetter(meta *spec.Swagger) func([]string) {
	return func(consumes []string) { meta.Consumes = consumes }
}

// metaProducesSetter returns a setter for the spec's global produces mime types.
func metaProducesSetter(meta *spec.Swagger) func([]string) {
	return func(produces []string) { meta.Produces = produces }
}

// metaSchemeSetter returns a setter for the spec's supported schemes.
func metaSchemeSetter(meta *spec.Swagger) func([]string) {
	return func(schemes []string) { meta.Schemes = schemes }
}

// metaSecuritySetter returns a setter for the spec's global security requirements.
func metaSecuritySetter(meta *spec.Swagger) func([]map[string][]string) {
	return func(secDefs []map[string][]string) { meta.Security = secDefs }
}
+
+func metaSecurityDefinitionsSetter(meta *spec.Swagger) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.SecurityDefinitions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ meta.SecurityDefinitions = jsonData
+ return nil
+ }
+}
+
+func metaVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.Extensions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ for k := range jsonData {
+ if !rxAllowedExtensions.MatchString(k) {
+ return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
+ }
+ }
+ meta.Extensions = jsonData
+ return nil
+ }
+}
+
+func infoVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.Extensions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ for k := range jsonData {
+ if !rxAllowedExtensions.MatchString(k) {
+ return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
+ }
+ }
+ meta.Info.Extensions = jsonData
+ return nil
+ }
+}
+
// newMetaParser wires up a sectionedParser that populates swspec (allocating
// its Info object when absent) from a swagger:meta comment section. The
// leading free-form comment text becomes the title and description; each
// tagger below handles one specific annotation.
func newMetaParser(swspec *spec.Swagger) *sectionedParser {
	sp := new(sectionedParser)
	if swspec.Info == nil {
		swspec.Info = new(spec.Info)
	}
	info := swspec.Info
	sp.setTitle = func(lines []string) {
		tosave := joinDropLast(lines)
		if len(tosave) > 0 {
			// clean the title line with rxStripTitleComments before storing it
			tosave = rxStripTitleComments.ReplaceAllString(tosave, "")
		}
		info.Title = tosave
	}
	sp.setDescription = func(lines []string) { info.Description = joinDropLast(lines) }
	sp.taggers = []tagParser{
		newMultiLineTagParser("TOS", newMultilineDropEmptyParser(rxTOS, metaTOSSetter(info)), false),
		newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, metaConsumesSetter(swspec)), false),
		newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, metaProducesSetter(swspec)), false),
		newSingleLineTagParser("Schemes", newSetSchemes(metaSchemeSetter(swspec))),
		newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, metaSecuritySetter(swspec)), false),
		newMultiLineTagParser("SecurityDefinitions", newYamlParser(rxSecurity, metaSecurityDefinitionsSetter(swspec)), true),
		newSingleLineTagParser("Version", &setMetaSingle{swspec, rxVersion, setInfoVersion}),
		newSingleLineTagParser("Host", &setMetaSingle{swspec, rxHost, setSwaggerHost}),
		newSingleLineTagParser("BasePath", &setMetaSingle{swspec, rxBasePath, setSwaggerBasePath}),
		newSingleLineTagParser("Contact", &setMetaSingle{swspec, rxContact, setInfoContact}),
		newSingleLineTagParser("License", &setMetaSingle{swspec, rxLicense, setInfoLicense}),
		newMultiLineTagParser("YAMLInfoExtensionsBlock", newYamlParser(rxInfoExtensions, infoVendorExtensibleSetter(swspec)), true),
		newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, metaVendorExtensibleSetter(swspec)), true),
	}
	return sp
}
+
// setMetaSingle applies a single-line annotation to the swagger spec: when a
// line matches rx, the first capture group is handed to the set callback.
type setMetaSingle struct {
	spec *spec.Swagger
	rx   *regexp.Regexp
	set  func(spec *spec.Swagger, lines []string) error
}

// Matches reports whether this tagger handles the given line.
func (s *setMetaSingle) Matches(line string) bool {
	return s.rx.MatchString(line)
}

// Parse extracts the first capture group from the first line and passes it
// to the setter. Empty input or a non-matching line is a no-op.
func (s *setMetaSingle) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}
	matches := s.rx.FindStringSubmatch(lines[0])
	if len(matches) > 1 && len(matches[1]) > 0 {
		return s.set(s.spec, []string{matches[1]})
	}
	return nil
}
+
+func setSwaggerHost(swspec *spec.Swagger, lines []string) error {
+ lns := lines
+ if len(lns) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ lns = []string{"localhost"}
+ }
+ swspec.Host = lns[0]
+ return nil
+}
+
+func setSwaggerBasePath(swspec *spec.Swagger, lines []string) error {
+ var ln string
+ if len(lines) > 0 {
+ ln = lines[0]
+ }
+ swspec.BasePath = ln
+ return nil
+}
+
+func setInfoVersion(swspec *spec.Swagger, lines []string) error {
+ if len(lines) == 0 {
+ return nil
+ }
+ info := safeInfo(swspec)
+ info.Version = strings.TrimSpace(lines[0])
+ return nil
+}
+
+func setInfoContact(swspec *spec.Swagger, lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ contact, err := parseContactInfo(lines[0])
+ if err != nil {
+ return err
+ }
+ info := safeInfo(swspec)
+ info.Contact = contact
+ return nil
+}
+
+func parseContactInfo(line string) (*spec.ContactInfo, error) {
+ nameEmail, url := splitURL(line)
+ var name, email string
+ if len(nameEmail) > 0 {
+ addr, err := mail.ParseAddress(nameEmail)
+ if err != nil {
+ return nil, err
+ }
+ name, email = addr.Name, addr.Address
+ }
+ return &spec.ContactInfo{
+ URL: url,
+ Name: name,
+ Email: email,
+ }, nil
+}
+
+func setInfoLicense(swspec *spec.Swagger, lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ info := safeInfo(swspec)
+ line := lines[0]
+ name, url := splitURL(line)
+ info.License = &spec.License{
+ Name: name,
+ URL: url,
+ }
+ return nil
+}
+
// safeInfo returns the spec's Info object, lazily allocating it on first use.
func safeInfo(swspec *spec.Swagger) *spec.Info {
	if swspec.Info == nil {
		swspec.Info = new(spec.Info)
	}
	return swspec.Info
}
+
// httpFTPScheme matches the URL scheme prefixes http://, https://, ftp://,
// ftps://, ws:// and wss:// (the previous comment omitted the ftp variants,
// which the pattern does match).
var httpFTPScheme = regexp.MustCompile("(?:(?:ht|f)tp|ws)s?://")

// splitURL splits a line into the text preceding the first URL and the URL
// itself (which runs to the end of the line). Either part may be empty and
// both are trimmed of surrounding whitespace.
func splitURL(line string) (notURL, url string) {
	str := strings.TrimSpace(line)
	parts := httpFTPScheme.FindStringIndex(str)
	if len(parts) == 0 {
		// no URL found: the whole (trimmed) line is plain text.
		// The previous code re-checked len(str) > 0 and then len(parts) > 0,
		// both of which were redundant on this path.
		return str, ""
	}
	notURL = strings.TrimSpace(str[:parts[0]])
	url = strings.TrimSpace(str[parts[0]:])
	return
}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/operations.go b/vendor/github.com/go-swagger/go-swagger/scan/operations.go
new file mode 100644
index 000000000..31e2ea5a9
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/operations.go
@@ -0,0 +1,85 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/go-openapi/spec"
+
+ "golang.org/x/tools/go/loader"
+)
+
// newOperationsParser builds a parser that extracts swagger:operation
// annotations from the comments of the loaded program.
func newOperationsParser(prog *loader.Program) *operationsParser {
	return &operationsParser{
		program: prog,
	}
}

// operationsParser collects operations declared via swagger:operation comments.
type operationsParser struct {
	program     *loader.Program
	definitions map[string]spec.Schema     // model definitions keyed by name
	operations  map[string]*spec.Operation // previously discovered operations keyed by id
	responses   map[string]spec.Response   // named responses
}
+
+func (op *operationsParser) Parse(gofile *ast.File, target interface{}, includeTags map[string]bool, excludeTags map[string]bool) error {
+ tgt := target.(*spec.Paths)
+ for _, comsec := range gofile.Comments {
+ content := parsePathAnnotation(rxOperation, comsec.List)
+
+ if content.Method == "" {
+ continue // it's not, next!
+ }
+
+ if !shouldAcceptTag(content.Tags, includeTags, excludeTags) {
+ if Debug {
+ fmt.Printf("operation %s %s is ignored due to tag rules\n", content.Method, content.Path)
+ }
+ continue
+ }
+
+ pthObj := tgt.Paths[content.Path]
+
+ op := setPathOperation(
+ content.Method, content.ID,
+ &pthObj, op.operations[content.ID])
+
+ op.Tags = content.Tags
+
+ sp := new(yamlSpecScanner)
+ sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
+ sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
+
+ if err := sp.Parse(content.Remaining); err != nil {
+ return fmt.Errorf("operation (%s): %v", op.ID, err)
+ }
+ if err := sp.UnmarshalSpec(op.UnmarshalJSON); err != nil {
+ return fmt.Errorf("operation (%s): %v", op.ID, err)
+ }
+
+ if tgt.Paths == nil {
+ tgt.Paths = make(map[string]spec.PathItem)
+ }
+
+ tgt.Paths[content.Path] = pthObj
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/parameters.go b/vendor/github.com/go-swagger/go-swagger/scan/parameters.go
new file mode 100644
index 000000000..58d96ebe3
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/parameters.go
@@ -0,0 +1,515 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "fmt"
+ "go/ast"
+ "strings"
+
+ "github.com/go-openapi/spec"
+ "golang.org/x/tools/go/loader"
+)
+
// operationValidationBuilder extends validationBuilder with the ability to
// record a collection format for array-typed parameters.
type operationValidationBuilder interface {
	validationBuilder
	SetCollectionFormat(string)
}
+
// paramTypable adapts a spec.Parameter to the swaggerTypable interface so
// type information discovered in the AST can be written onto it.
type paramTypable struct {
	param *spec.Parameter
}

// Level reports the nesting depth; a parameter itself is always top-level.
func (pt paramTypable) Level() int { return 0 }

// Typed sets the simple type and format on the parameter.
func (pt paramTypable) Typed(tpe, format string) {
	pt.param.Typed(tpe, format)
}

// WithEnum records the allowed enum values on the parameter.
func (pt paramTypable) WithEnum(values ...interface{}) {
	pt.param.WithEnum(values...)
}

// SetRef records a $ref on the parameter.
func (pt paramTypable) SetRef(ref spec.Ref) {
	pt.param.Ref = ref
}

// Items returns the typable for the parameter's element type, marking the
// parameter as an array. Body parameters delegate to their schema instead
// (via bodyTypable).
func (pt paramTypable) Items() swaggerTypable {
	bdt, schema := bodyTypable(pt.param.In, pt.param.Schema)
	if bdt != nil {
		pt.param.Schema = schema
		return bdt
	}

	if pt.param.Items == nil {
		pt.param.Items = new(spec.Items)
	}
	pt.param.Type = "array"
	return itemsTypable{pt.param.Items, 1}
}

// Schema returns the parameter's schema for body parameters (allocating it
// when needed); parameters in any other location have no schema and get nil.
func (pt paramTypable) Schema() *spec.Schema {
	if pt.param.In != "body" {
		return nil
	}
	if pt.param.Schema == nil {
		pt.param.Schema = new(spec.Schema)
	}
	return pt.param.Schema
}
+
// itemsTypable adapts spec.Items (array element metadata) to the
// swaggerTypable interface, tracking its nesting level inside the array.
type itemsTypable struct {
	items *spec.Items
	level int
}

// Level reports the array nesting depth of these items.
func (pt itemsTypable) Level() int { return pt.level }

// Typed sets the simple type and format on the items.
func (pt itemsTypable) Typed(tpe, format string) {
	pt.items.Typed(tpe, format)
}

// SetRef records a $ref on the items.
func (pt itemsTypable) SetRef(ref spec.Ref) {
	pt.items.Ref = ref
}

// WithEnum records the allowed enum values on the items.
func (pt itemsTypable) WithEnum(values ...interface{}) {
	pt.items.WithEnum(values...)
}

// Schema always returns nil: spec.Items never carries a schema.
func (pt itemsTypable) Schema() *spec.Schema {
	return nil
}

// Items descends one array level, allocating the nested items when needed.
func (pt itemsTypable) Items() swaggerTypable {
	if pt.items.Items == nil {
		pt.items.Items = new(spec.Items)
	}
	pt.items.Type = "array"
	return itemsTypable{pt.items.Items, pt.level + 1}
}
+
// paramValidations exposes the validation setters of a spec.Parameter so
// the shared tag parsers can record validations parsed from comments.
type paramValidations struct {
	current *spec.Parameter
}

// SetMaximum records an (optionally exclusive) upper bound.
func (sv paramValidations) SetMaximum(val float64, exclusive bool) {
	sv.current.Maximum = &val
	sv.current.ExclusiveMaximum = exclusive
}

// SetMinimum records an (optionally exclusive) lower bound.
func (sv paramValidations) SetMinimum(val float64, exclusive bool) {
	sv.current.Minimum = &val
	sv.current.ExclusiveMinimum = exclusive
}

// The remaining setters copy single validation values onto the parameter.
func (sv paramValidations) SetMultipleOf(val float64)      { sv.current.MultipleOf = &val }
func (sv paramValidations) SetMinItems(val int64)          { sv.current.MinItems = &val }
func (sv paramValidations) SetMaxItems(val int64)          { sv.current.MaxItems = &val }
func (sv paramValidations) SetMinLength(val int64)         { sv.current.MinLength = &val }
func (sv paramValidations) SetMaxLength(val int64)         { sv.current.MaxLength = &val }
func (sv paramValidations) SetPattern(val string)          { sv.current.Pattern = val }
func (sv paramValidations) SetUnique(val bool)             { sv.current.UniqueItems = val }
func (sv paramValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }

// SetEnum records the allowed values as parsed by parseEnum against the
// parameter's simple schema type/format.
func (sv paramValidations) SetEnum(val string) {
	sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
}

// SetDefault and SetExample record literal values verbatim.
func (sv paramValidations) SetDefault(val interface{}) { sv.current.Default = val }
func (sv paramValidations) SetExample(val interface{}) { sv.current.Example = val }
+
// itemsValidations exposes the validation setters of spec.Items (array
// element metadata), mirroring paramValidations for nested items.
type itemsValidations struct {
	current *spec.Items
}

// SetMaximum records an (optionally exclusive) upper bound.
func (sv itemsValidations) SetMaximum(val float64, exclusive bool) {
	sv.current.Maximum = &val
	sv.current.ExclusiveMaximum = exclusive
}

// SetMinimum records an (optionally exclusive) lower bound.
func (sv itemsValidations) SetMinimum(val float64, exclusive bool) {
	sv.current.Minimum = &val
	sv.current.ExclusiveMinimum = exclusive
}

// The remaining setters copy single validation values onto the items.
func (sv itemsValidations) SetMultipleOf(val float64)      { sv.current.MultipleOf = &val }
func (sv itemsValidations) SetMinItems(val int64)          { sv.current.MinItems = &val }
func (sv itemsValidations) SetMaxItems(val int64)          { sv.current.MaxItems = &val }
func (sv itemsValidations) SetMinLength(val int64)         { sv.current.MinLength = &val }
func (sv itemsValidations) SetMaxLength(val int64)         { sv.current.MaxLength = &val }
func (sv itemsValidations) SetPattern(val string)          { sv.current.Pattern = val }
func (sv itemsValidations) SetUnique(val bool)             { sv.current.UniqueItems = val }
func (sv itemsValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }

// SetEnum records the allowed values as parsed by parseEnum against the
// items' simple schema type/format.
func (sv itemsValidations) SetEnum(val string) {
	sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
}

// SetDefault and SetExample record literal values verbatim.
func (sv itemsValidations) SetDefault(val interface{}) { sv.current.Default = val }
func (sv itemsValidations) SetExample(val interface{}) { sv.current.Example = val }
+
+type paramDecl struct {
+ File *ast.File
+ Decl *ast.GenDecl
+ TypeSpec *ast.TypeSpec
+ OperationIDs []string
+}
+
+func (sd *paramDecl) inferOperationIDs() (opids []string) {
+ if len(sd.OperationIDs) > 0 {
+ opids = sd.OperationIDs
+ return
+ }
+
+ if sd.Decl.Doc != nil {
+ for _, cmt := range sd.Decl.Doc.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxParametersOverride.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ for _, pt := range strings.Split(matches[1], " ") {
+ tr := strings.TrimSpace(pt)
+ if len(tr) > 0 {
+ opids = append(opids, tr)
+ }
+ }
+ }
+ }
+ }
+ }
+ sd.OperationIDs = append(sd.OperationIDs, opids...)
+ return
+}
+
// newParameterParser builds a parameter parser backed by the loader program
// and a schema parser used to resolve referenced models.
func newParameterParser(prog *loader.Program) *paramStructParser {
	scp := new(paramStructParser)
	scp.program = prog
	scp.scp = newSchemaParser(prog)
	return scp
}

// paramStructParser scans structs annotated with swagger:parameters and
// turns their fields into operation parameters.
type paramStructParser struct {
	program   *loader.Program
	postDecls []schemaDecl
	scp       *schemaParser
}
+
// Parse will traverse a file and look for parameters.
//
// It inspects type declarations both at the package level and inside
// function bodies, handing each one to parseDecl which records parameters
// against the operations in target (a map keyed by operation id).
func (pp *paramStructParser) Parse(gofile *ast.File, target interface{}) error {
	tgt := target.(map[string]*spec.Operation)
	for _, decl := range gofile.Decls {
		switch x1 := decl.(type) {
		// Check for parameters at the package level.
		case *ast.GenDecl:
			for _, spc := range x1.Specs {
				switch x2 := spc.(type) {
				case *ast.TypeSpec:
					sd := paramDecl{gofile, x1, x2, nil}
					sd.inferOperationIDs()
					if err := pp.parseDecl(tgt, sd); err != nil {
						return err
					}
				}
			}
		// Check for parameters inside functions.
		case *ast.FuncDecl:
			for _, b := range x1.Body.List {
				switch x2 := b.(type) {
				case *ast.DeclStmt:
					switch x3 := x2.Decl.(type) {
					case *ast.GenDecl:
						for _, spc := range x3.Specs {
							switch x4 := spc.(type) {
							case *ast.TypeSpec:
								sd := paramDecl{gofile, x3, x4, nil}
								sd.inferOperationIDs()
								if err := pp.parseDecl(tgt, sd); err != nil {
									return err
								}
							}
						}
					}
				}
			}
		}
	}
	return nil
}
+
// parseDecl registers the parameter struct declaration against every
// operation id in its swagger:parameters annotation, creating operations on
// first sight, and parses the struct fields into parameter definitions.
func (pp *paramStructParser) parseDecl(operations map[string]*spec.Operation, decl paramDecl) error {
	// check if there is a swagger:parameters tag that is followed by one or more words,
	// these words are the ids of the operations this parameter struct applies to
	// once type name is found convert it to a schema, by looking up the schema in the
	// parameters dictionary that got passed into this parse method
	for _, opid := range decl.inferOperationIDs() {
		operation, ok := operations[opid]
		if !ok {
			// first time this operation id is seen: create a placeholder operation
			operation = new(spec.Operation)
			operations[opid] = operation
			operation.ID = opid
		}

		// analyze struct body for fields etc
		// each exported struct field:
		// * gets a type mapped to a go primitive
		// * perhaps gets a format
		// * has to document the validations that apply for the type and the field
		// * when the struct field points to a model it becomes a ref: #/definitions/ModelName
		// * comments that aren't tags is used as the description
		if tpe, ok := decl.TypeSpec.Type.(*ast.StructType); ok {
			if err := pp.parseStructType(decl.File, operation, tpe, make(map[string]spec.Parameter)); err != nil {
				return err
			}
		}

		//operations[opid] = operation
	}
	return nil
}
+
+func (pp *paramStructParser) parseEmbeddedStruct(gofile *ast.File, operation *spec.Operation, expr ast.Expr, seenPreviously map[string]spec.Parameter) error {
+ switch tpe := expr.(type) {
+ case *ast.Ident:
+ // do lookup of type
+ // take primitives into account, they should result in an error for swagger
+ pkg, err := pp.scp.packageForFile(gofile, tpe)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ file, _, ts, err := findSourceFile(pkg, tpe.Name)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ if st, ok := ts.Type.(*ast.StructType); ok {
+ return pp.parseStructType(file, operation, st, seenPreviously)
+ }
+ case *ast.SelectorExpr:
+ // look up package, file and then type
+ pkg, err := pp.scp.packageForSelector(gofile, tpe.X)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ file, _, ts, err := findSourceFile(pkg, tpe.Sel.Name)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ if st, ok := ts.Type.(*ast.StructType); ok {
+ return pp.parseStructType(file, operation, st, seenPreviously)
+ }
+ case *ast.StarExpr:
+ return pp.parseEmbeddedStruct(gofile, operation, tpe.X, seenPreviously)
+ }
+ fmt.Printf("3%#v\n", expr)
+ return fmt.Errorf("unable to resolve embedded struct for: %v", expr)
+}
+
// parseStructType walks the fields of a swagger:parameters struct and
// records each exported field as a parameter on the operation.
// Anonymous (embedded) fields are flattened in first so that named fields
// declared on this struct can override them; seenPreviously carries the
// parameters collected so far across that recursion.
func (pp *paramStructParser) parseStructType(gofile *ast.File, operation *spec.Operation, tpe *ast.StructType, seenPreviously map[string]spec.Parameter) error {
	if tpe.Fields != nil {
		pt := seenPreviously

		// first pass: flatten embedded structs into pt
		for _, fld := range tpe.Fields.List {
			if len(fld.Names) == 0 {
				// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
				// otherwise the fields will just be included as normal properties
				if err := pp.parseEmbeddedStruct(gofile, operation, fld.Type, pt); err != nil {
					return err
				}
			}
		}

		// a slice used to keep track of the sequence of the map keys, as maps does not keep to any specific sequence (since Go-1.4)
		sequence := []string{}

		// second pass: named, exported fields become parameters
		for _, fld := range tpe.Fields.List {
			if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() {
				gnm := fld.Names[0].Name
				// the json tag decides the parameter name and whether the field is skipped
				nm, ignore, _, err := parseJSONTag(fld)
				if err != nil {
					return err
				}
				if ignore {
					continue
				}

				in := "query"
				// scan for param location first, this changes some behavior down the line
				if fld.Doc != nil {
					for _, cmt := range fld.Doc.List {
						for _, line := range strings.Split(cmt.Text, "\n") {
							matches := rxIn.FindStringSubmatch(line)
							if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
								in = strings.TrimSpace(matches[1])
							}
						}
					}
				}

				// start from any parameter inherited from an embedded struct
				ps := pt[nm]
				ps.In = in
				var pty swaggerTypable = paramTypable{&ps}
				if in == "body" {
					// body parameters carry a full schema instead of a simple type
					pty = schemaTypable{pty.Schema(), 0}
				}
				// swagger:file on a formData field forces the "file" type
				if in == "formData" && fld.Doc != nil && fileParam(fld.Doc) {
					pty.Typed("file", "")
				} else {
					if err := pp.scp.parseNamedType(gofile, fld.Type, pty); err != nil {
						return err
					}
				}

				// an explicit strfmt annotation overrides the inferred type
				if strfmtName, ok := strfmtName(fld.Doc); ok {
					ps.Typed("string", strfmtName)
					ps.Ref = spec.Ref{}
				}

				sp := new(sectionedParser)
				sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
				if ps.Ref.String() == "" {
					// non-ref parameters accept the full set of validation tags
					sp.taggers = []tagParser{
						newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
						newSingleLineTagParser("maximum", &setMaximum{paramValidations{&ps}, rxf(rxMaximumFmt, "")}),
						newSingleLineTagParser("minimum", &setMinimum{paramValidations{&ps}, rxf(rxMinimumFmt, "")}),
						newSingleLineTagParser("multipleOf", &setMultipleOf{paramValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
						newSingleLineTagParser("minLength", &setMinLength{paramValidations{&ps}, rxf(rxMinLengthFmt, "")}),
						newSingleLineTagParser("maxLength", &setMaxLength{paramValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
						newSingleLineTagParser("pattern", &setPattern{paramValidations{&ps}, rxf(rxPatternFmt, "")}),
						newSingleLineTagParser("collectionFormat", &setCollectionFormat{paramValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
						newSingleLineTagParser("minItems", &setMinItems{paramValidations{&ps}, rxf(rxMinItemsFmt, "")}),
						newSingleLineTagParser("maxItems", &setMaxItems{paramValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
						newSingleLineTagParser("unique", &setUnique{paramValidations{&ps}, rxf(rxUniqueFmt, "")}),
						newSingleLineTagParser("enum", &setEnum{paramValidations{&ps}, rxf(rxEnumFmt, "")}),
						newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxDefaultFmt, "")}),
						newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxExampleFmt, "")}),
						newSingleLineTagParser("required", &setRequiredParam{&ps}),
					}

					// itemsTaggers builds the per-nesting-level validation tag parsers for array items
					itemsTaggers := func(items *spec.Items, level int) []tagParser {
						// the expression is 1-index based not 0-index
						itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)

						return []tagParser{
							newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
						}
					}

					// parseArrayTypes recurses into nested array element types, collecting
					// the item-level taggers for each level it encounters
					var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
					parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
						if items == nil {
							return []tagParser{}, nil
						}
						switch iftpe := expr.(type) {
						case *ast.ArrayType:
							eleTaggers := itemsTaggers(items, level)
							sp.taggers = append(eleTaggers, sp.taggers...)
							otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
							if err != nil {
								return nil, err
							}
							return otherTaggers, nil
						case *ast.SelectorExpr:
							otherTaggers, err := parseArrayTypes(iftpe.Sel, items.Items, level+1)
							if err != nil {
								return nil, err
							}
							return otherTaggers, nil
						case *ast.Ident:
							taggers := []tagParser{}
							// Obj == nil suggests a predeclared (primitive) identifier — TODO confirm
							if iftpe.Obj == nil {
								taggers = itemsTaggers(items, level)
							}
							otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
							if err != nil {
								return nil, err
							}
							return append(taggers, otherTaggers...), nil
						case *ast.StarExpr:
							otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
							if err != nil {
								return nil, err
							}
							return otherTaggers, nil
						default:
							return nil, fmt.Errorf("unknown field type ele for %q", nm)
						}
					}

					// check if this is a primitive, if so parse the validations from the
					// doc comments of the slice declaration.
					if ftped, ok := fld.Type.(*ast.ArrayType); ok {
						taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
						if err != nil {
							return err
						}
						sp.taggers = append(taggers, sp.taggers...)
					}

				} else {

					// ref parameters only accept location and required overrides
					sp.taggers = []tagParser{
						newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
						newSingleLineTagParser("required", &matchOnlyParam{&ps, rxRequired}),
					}
				}
				if err := sp.Parse(fld.Doc); err != nil {
					return err
				}
				// path parameters are always required
				if ps.In == "path" {
					ps.Required = true
				}

				if ps.Name == "" {
					ps.Name = nm
				}

				// keep the original go name when the json tag renamed the field
				if nm != gnm {
					addExtension(&ps.VendorExtensible, "x-go-name", gnm)
				}
				pt[nm] = ps
				sequence = append(sequence, nm)
			}
		}

		// replace (or append) the collected parameters on the operation in
		// declared field order
		for _, k := range sequence {
			p := pt[k]
			for i, v := range operation.Parameters {
				if v.Name == k {
					operation.Parameters = append(operation.Parameters[:i], operation.Parameters[i+1:]...)
					break
				}
			}
			operation.Parameters = append(operation.Parameters, p)
		}
	}

	return nil
}
+
+func isAliasParam(prop swaggerTypable) bool {
+ var isParam bool
+ if param, ok := prop.(paramTypable); ok {
+ isParam = param.param.In == "query" ||
+ param.param.In == "path" ||
+ param.param.In == "formData"
+ }
+ return isParam
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/path.go b/vendor/github.com/go-swagger/go-swagger/scan/path.go
new file mode 100644
index 000000000..7302d41c3
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/path.go
@@ -0,0 +1,151 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "go/ast"
+ "regexp"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
// parsedPathContent holds the pieces extracted from a path annotation
// line (e.g. swagger:route): the HTTP method, the path template, the
// operation id, the tags, and any comment lines that followed the
// annotation.
type parsedPathContent struct {
	Method, Path, ID string
	Tags             []string
	Remaining        *ast.CommentGroup
}
+
// parsePathAnnotation scans comment lines for a path annotation matching
// the supplied regexp. When a line matches, the method, path, tags and
// operation id are captured from the submatches; subsequent non-matching
// lines are collected into Remaining as the operation's documentation.
// Blank lines immediately following a match are dropped.
func parsePathAnnotation(annotation *regexp.Regexp, lines []*ast.Comment) (cnt parsedPathContent) {
	var justMatched bool

	for _, cmt := range lines {
		for _, line := range strings.Split(cmt.Text, "\n") {
			matches := annotation.FindStringSubmatch(line)
			if len(matches) > 3 {
				// submatches: 1=method, 2=path, 3=tags, last=operation id
				cnt.Method, cnt.Path, cnt.ID = matches[1], matches[2], matches[len(matches)-1]
				cnt.Tags = rxSpace.Split(matches[3], -1)
				if len(matches[3]) == 0 {
					cnt.Tags = nil
				}
				justMatched = true
			} else if cnt.Method != "" {
				if cnt.Remaining == nil {
					cnt.Remaining = new(ast.CommentGroup)
				}
				// skip the empty line directly after the annotation itself
				if !justMatched || strings.TrimSpace(rxStripComments.ReplaceAllString(line, "")) != "" {
					cc := new(ast.Comment)
					cc.Slash = cmt.Slash
					cc.Text = line
					cnt.Remaining.List = append(cnt.Remaining.List, cc)
					justMatched = false
				}
			}
		}
	}

	return
}
+
+func setPathOperation(method, id string, pthObj *spec.PathItem, op *spec.Operation) *spec.Operation {
+ if op == nil {
+ op = new(spec.Operation)
+ op.ID = id
+ }
+
+ switch strings.ToUpper(method) {
+ case "GET":
+ if pthObj.Get != nil {
+ if id == pthObj.Get.ID {
+ op = pthObj.Get
+ } else {
+ pthObj.Get = op
+ }
+ } else {
+ pthObj.Get = op
+ }
+
+ case "POST":
+ if pthObj.Post != nil {
+ if id == pthObj.Post.ID {
+ op = pthObj.Post
+ } else {
+ pthObj.Post = op
+ }
+ } else {
+ pthObj.Post = op
+ }
+
+ case "PUT":
+ if pthObj.Put != nil {
+ if id == pthObj.Put.ID {
+ op = pthObj.Put
+ } else {
+ pthObj.Put = op
+ }
+ } else {
+ pthObj.Put = op
+ }
+
+ case "PATCH":
+ if pthObj.Patch != nil {
+ if id == pthObj.Patch.ID {
+ op = pthObj.Patch
+ } else {
+ pthObj.Patch = op
+ }
+ } else {
+ pthObj.Patch = op
+ }
+
+ case "HEAD":
+ if pthObj.Head != nil {
+ if id == pthObj.Head.ID {
+ op = pthObj.Head
+ } else {
+ pthObj.Head = op
+ }
+ } else {
+ pthObj.Head = op
+ }
+
+ case "DELETE":
+ if pthObj.Delete != nil {
+ if id == pthObj.Delete.ID {
+ op = pthObj.Delete
+ } else {
+ pthObj.Delete = op
+ }
+ } else {
+ pthObj.Delete = op
+ }
+
+ case "OPTIONS":
+ if pthObj.Options != nil {
+ if id == pthObj.Options.ID {
+ op = pthObj.Options
+ } else {
+ pthObj.Options = op
+ }
+ } else {
+ pthObj.Options = op
+ }
+ }
+
+ return op
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/responses.go b/vendor/github.com/go-swagger/go-swagger/scan/responses.go
new file mode 100644
index 000000000..327b8a488
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/responses.go
@@ -0,0 +1,453 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "fmt"
+ "go/ast"
+ "strings"
+
+ "golang.org/x/tools/go/loader"
+
+ "github.com/go-openapi/spec"
+)
+
// responseTypable adapts a response and one of its headers to the
// swaggerTypable interface, so type information discovered by the schema
// parser can be recorded on either the header or the body schema,
// depending on the "in" location.
type responseTypable struct {
	in       string
	header   *spec.Header
	response *spec.Response
}
+
// Level reports the nesting level; response typables always start at 0.
func (ht responseTypable) Level() int { return 0 }

// Typed records the swagger type and format on the backing header.
func (ht responseTypable) Typed(tpe, format string) {
	ht.header.Typed(tpe, format)
}
+
+func (ht responseTypable) WithEnum(values ...interface{}) {
+ ht.header.WithEnum(values)
+}
+
+func bodyTypable(in string, schema *spec.Schema) (swaggerTypable, *spec.Schema) {
+ if in == "body" {
+ // get the schema for items on the schema property
+ if schema == nil {
+ schema = new(spec.Schema)
+ }
+ if schema.Items == nil {
+ schema.Items = new(spec.SchemaOrArray)
+ }
+ if schema.Items.Schema == nil {
+ schema.Items.Schema = new(spec.Schema)
+ }
+ schema.Typed("array", "")
+ return schemaTypable{schema.Items.Schema, 0}, schema
+ }
+ return nil, nil
+}
+
// Items returns the typable that array element types should be written
// to. For body responses this is the item schema of an array body; for
// everything else the header is marked as an array and its spec.Items is
// used.
func (ht responseTypable) Items() swaggerTypable {
	bdt, schema := bodyTypable(ht.in, ht.response.Schema)
	if bdt != nil {
		ht.response.Schema = schema
		return bdt
	}

	if ht.header.Items == nil {
		ht.header.Items = new(spec.Items)
	}
	ht.header.Type = "array"
	return itemsTypable{ht.header.Items, 1}
}
+
// SetRef records a $ref on the (lazily created) response schema.
func (ht responseTypable) SetRef(ref spec.Ref) {
	// having trouble seeing the usefulness of this one here
	ht.Schema().Ref = ref
}

// Schema returns the response schema, creating it on first use.
func (ht responseTypable) Schema() *spec.Schema {
	if ht.response.Schema == nil {
		ht.response.Schema = new(spec.Schema)
	}
	return ht.response.Schema
}

// SetSchema replaces the response schema.
func (ht responseTypable) SetSchema(schema *spec.Schema) {
	ht.response.Schema = schema
}

// CollectionOf marks the backing header as a collection of the given items.
func (ht responseTypable) CollectionOf(items *spec.Items, format string) {
	ht.header.CollectionOf(items, format)
}
+
// headerValidations applies validation tag values parsed from doc
// comments to a response header.
type headerValidations struct {
	current *spec.Header
}

// SetMaximum records the maximum and whether it is exclusive.
func (sv headerValidations) SetMaximum(val float64, exclusive bool) {
	sv.current.Maximum = &val
	sv.current.ExclusiveMaximum = exclusive
}

// SetMinimum records the minimum and whether it is exclusive.
func (sv headerValidations) SetMinimum(val float64, exclusive bool) {
	sv.current.Minimum = &val
	sv.current.ExclusiveMinimum = exclusive
}
func (sv headerValidations) SetMultipleOf(val float64)      { sv.current.MultipleOf = &val }
func (sv headerValidations) SetMinItems(val int64)          { sv.current.MinItems = &val }
func (sv headerValidations) SetMaxItems(val int64)          { sv.current.MaxItems = &val }
func (sv headerValidations) SetMinLength(val int64)         { sv.current.MinLength = &val }
func (sv headerValidations) SetMaxLength(val int64)         { sv.current.MaxLength = &val }
func (sv headerValidations) SetPattern(val string)          { sv.current.Pattern = val }
func (sv headerValidations) SetUnique(val bool)             { sv.current.UniqueItems = val }
func (sv headerValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }

// SetEnum parses the comma separated enum expression against the header's
// own type/format before storing it.
func (sv headerValidations) SetEnum(val string) {
	sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
}
func (sv headerValidations) SetDefault(val interface{}) { sv.current.Default = val }
func (sv headerValidations) SetExample(val interface{}) { sv.current.Example = val }
+
+func newResponseDecl(file *ast.File, decl *ast.GenDecl, ts *ast.TypeSpec) responseDecl {
+ var rd responseDecl
+ rd.File = file
+ rd.Decl = decl
+ rd.TypeSpec = ts
+ rd.inferNames()
+ return rd
+}
+
// responseDecl captures a type declaration that may carry a
// swagger:response annotation, together with the names resolved from it.
type responseDecl struct {
	File     *ast.File
	Decl     *ast.GenDecl
	TypeSpec *ast.TypeSpec
	GoName   string // the go type name
	Name     string // the swagger response name (may be overridden by the annotation)
	annotated bool  // true once a swagger:response annotation was seen
}
+
// hasAnnotation reports whether the declaration carries a
// swagger:response annotation, resolving the names as a side effect.
func (sd *responseDecl) hasAnnotation() bool {
	sd.inferNames()
	return sd.annotated
}
+
// inferNames resolves the go type name and the swagger response name for
// this declaration. Results are memoized on the receiver; a
// swagger:response annotation in the doc comment marks the declaration as
// annotated and a non-empty capture group overrides the response name.
func (sd *responseDecl) inferNames() (goName string, name string) {
	// already computed once
	if sd.GoName != "" {
		goName, name = sd.GoName, sd.Name
		return
	}
	goName = sd.TypeSpec.Name.Name
	name = goName
	if sd.Decl.Doc != nil {
	DECLS:
		for _, cmt := range sd.Decl.Doc.List {
			for _, ln := range strings.Split(cmt.Text, "\n") {
				matches := rxResponseOverride.FindStringSubmatch(ln)
				if len(matches) > 0 {
					sd.annotated = true
				}
				// a non-empty first capture group overrides the name
				if len(matches) > 1 && len(matches[1]) > 0 {
					name = matches[1]
					break DECLS
				}
			}
		}
	}
	sd.GoName = goName
	sd.Name = name
	return
}
+
+func newResponseParser(prog *loader.Program) *responseParser {
+ return &responseParser{prog, nil, newSchemaParser(prog)}
+}
+
// responseParser collects swagger:response declarations from a loaded
// program, delegating type resolution to a schemaParser.
type responseParser struct {
	program   *loader.Program
	postDecls []schemaDecl
	scp       *schemaParser
}
+
// Parse scans a go file for type declarations annotated with
// swagger:response — both at package level and declared inside function
// bodies — and records the resulting responses in target, which must be a
// map[string]spec.Response keyed by response name.
func (rp *responseParser) Parse(gofile *ast.File, target interface{}) error {
	tgt := target.(map[string]spec.Response)
	for _, decl := range gofile.Decls {
		switch x1 := decl.(type) {
		// Check for parameters at the package level.
		case *ast.GenDecl:
			for _, spc := range x1.Specs {
				switch x2 := spc.(type) {
				case *ast.TypeSpec:
					sd := newResponseDecl(gofile, x1, x2)
					if sd.hasAnnotation() {
						if err := rp.parseDecl(tgt, sd); err != nil {
							return err
						}
					}
				}
			}
		// Check for parameters inside functions.
		case *ast.FuncDecl:
			for _, b := range x1.Body.List {
				switch x2 := b.(type) {
				case *ast.DeclStmt:
					switch x3 := x2.Decl.(type) {
					case *ast.GenDecl:
						for _, spc := range x3.Specs {
							switch x4 := spc.(type) {
							case *ast.TypeSpec:
								sd := newResponseDecl(gofile, x3, x4)
								if sd.hasAnnotation() {
									if err := rp.parseDecl(tgt, sd); err != nil {
										return err
									}
								}
							}
						}
					}
				}
			}
		}
	}
	return nil
}
+
// parseDecl turns a single swagger:response annotated type declaration
// into a spec.Response: the declaration's doc comment becomes the
// description and, when the type is a struct, its fields are parsed into
// headers/schema. The result is stored back under the response name.
func (rp *responseParser) parseDecl(responses map[string]spec.Response, decl responseDecl) error {
	// check if there is a swagger:parameters tag that is followed by one or more words,
	// these words are the ids of the operations this parameter struct applies to
	// once type name is found convert it to a schema, by looking up the schema in the
	// parameters dictionary that got passed into this parse method
	response := responses[decl.Name]
	resPtr := &response

	// analyze doc comment for the model
	sp := new(sectionedParser)
	sp.setDescription = func(lines []string) { resPtr.Description = joinDropLast(lines) }
	if err := sp.Parse(decl.Decl.Doc); err != nil {
		return err
	}

	// analyze struct body for fields etc
	// each exported struct field:
	// * gets a type mapped to a go primitive
	// * perhaps gets a format
	// * has to document the validations that apply for the type and the field
	// * when the struct field points to a model it becomes a ref: #/definitions/ModelName
	// * comments that aren't tags is used as the description
	if tpe, ok := decl.TypeSpec.Type.(*ast.StructType); ok {
		if err := rp.parseStructType(decl.File, resPtr, tpe, make(map[string]struct{})); err != nil {
			return err
		}
	}

	responses[decl.Name] = response
	return nil
}
+
+func (rp *responseParser) parseEmbeddedStruct(gofile *ast.File, response *spec.Response, expr ast.Expr, seenPreviously map[string]struct{}) error {
+ switch tpe := expr.(type) {
+ case *ast.Ident:
+ // do lookup of type
+ // take primitives into account, they should result in an error for swagger
+ pkg, err := rp.scp.packageForFile(gofile, tpe)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ file, _, ts, err := findSourceFile(pkg, tpe.Name)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ if st, ok := ts.Type.(*ast.StructType); ok {
+ return rp.parseStructType(file, response, st, seenPreviously)
+ }
+ case *ast.SelectorExpr:
+ // look up package, file and then type
+ pkg, err := rp.scp.packageForSelector(gofile, tpe.X)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ file, _, ts, err := findSourceFile(pkg, tpe.Sel.Name)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ if st, ok := ts.Type.(*ast.StructType); ok {
+ return rp.parseStructType(file, response, st, seenPreviously)
+ }
+ case *ast.StarExpr:
+ return rp.parseEmbeddedStruct(gofile, response, tpe.X, seenPreviously)
+ }
+ fmt.Printf("1%#v\n", expr)
+ return fmt.Errorf("unable to resolve embedded struct for: %v", expr)
+}
+
// parseStructType walks the fields of a swagger:response struct. Named
// exported fields become response headers (or, for in:body / swagger:file
// fields, the response schema); anonymous fields are merged in through
// parseEmbeddedStruct. seenPreviously tracks the header names declared in
// this pass so that headers no longer declared can be pruned at the end.
func (rp *responseParser) parseStructType(gofile *ast.File, response *spec.Response, tpe *ast.StructType, seenPreviously map[string]struct{}) error {
	if tpe.Fields != nil {

		seenProperties := seenPreviously

		// first pass: flatten embedded structs
		for _, fld := range tpe.Fields.List {
			if len(fld.Names) == 0 {
				// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
				// otherwise the fields will just be included as normal properties
				if err := rp.parseEmbeddedStruct(gofile, response, fld.Type, seenProperties); err != nil {
					return err
				}
			}
		}

		// second pass: named, exported fields
		for _, fld := range tpe.Fields.List {
			if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() {
				nm, ignore, _, err := parseJSONTag(fld)
				if err != nil {
					return err
				}
				if ignore {
					continue
				}

				var in string
				// scan for param location first, this changes some behavior down the line
				if fld.Doc != nil {
					for _, cmt := range fld.Doc.List {
						for _, line := range strings.Split(cmt.Text, "\n") {
							matches := rxIn.FindStringSubmatch(line)
							if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
								in = strings.TrimSpace(matches[1])
							}
						}
					}
				}

				ps := response.Headers[nm]

				// support swagger:file for response
				// An API operation can return a file, such as an image or PDF. In this case,
				// define the response schema with type: file and specify the appropriate MIME types in the produces section.
				if fld.Doc != nil && fileParam(fld.Doc) {
					response.Schema = &spec.Schema{}
					response.Schema.Typed("file", "")
				} else if err := rp.scp.parseNamedType(gofile, fld.Type, responseTypable{in, &ps, response}); err != nil {
					return err
				}

				// an explicit strfmt annotation overrides the inferred type
				if strfmtName, ok := strfmtName(fld.Doc); ok {
					ps.Typed("string", strfmtName)
				}

				sp := new(sectionedParser)
				sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
				sp.taggers = []tagParser{
					newSingleLineTagParser("maximum", &setMaximum{headerValidations{&ps}, rxf(rxMaximumFmt, "")}),
					newSingleLineTagParser("minimum", &setMinimum{headerValidations{&ps}, rxf(rxMinimumFmt, "")}),
					newSingleLineTagParser("multipleOf", &setMultipleOf{headerValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
					newSingleLineTagParser("minLength", &setMinLength{headerValidations{&ps}, rxf(rxMinLengthFmt, "")}),
					newSingleLineTagParser("maxLength", &setMaxLength{headerValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
					newSingleLineTagParser("pattern", &setPattern{headerValidations{&ps}, rxf(rxPatternFmt, "")}),
					newSingleLineTagParser("collectionFormat", &setCollectionFormat{headerValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
					newSingleLineTagParser("minItems", &setMinItems{headerValidations{&ps}, rxf(rxMinItemsFmt, "")}),
					newSingleLineTagParser("maxItems", &setMaxItems{headerValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
					newSingleLineTagParser("unique", &setUnique{headerValidations{&ps}, rxf(rxUniqueFmt, "")}),
					newSingleLineTagParser("enum", &setEnum{headerValidations{&ps}, rxf(rxEnumFmt, "")}),
					newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxDefaultFmt, "")}),
					newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxExampleFmt, "")}),
				}
				// itemsTaggers builds the per-nesting-level validation tag parsers for array items
				itemsTaggers := func(items *spec.Items, level int) []tagParser {
					// the expression is 1-index based not 0-index
					itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)

					return []tagParser{
						newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
					}
				}

				// parseArrayTypes recurses into nested array element types,
				// collecting the item-level taggers for each level
				var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
				parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
					if items == nil {
						return []tagParser{}, nil
					}
					switch iftpe := expr.(type) {
					case *ast.ArrayType:
						eleTaggers := itemsTaggers(items, level)
						sp.taggers = append(eleTaggers, sp.taggers...)
						otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
						if err != nil {
							return nil, err
						}
						return otherTaggers, nil
					case *ast.Ident:
						taggers := []tagParser{}
						// Obj == nil suggests a predeclared (primitive) identifier — TODO confirm
						if iftpe.Obj == nil {
							taggers = itemsTaggers(items, level)
						}
						otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
						if err != nil {
							return nil, err
						}
						return append(taggers, otherTaggers...), nil
					case *ast.StarExpr:
						otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
						if err != nil {
							return nil, err
						}
						return otherTaggers, nil
					default:
						return nil, fmt.Errorf("unknown field type ele for %q", nm)
					}
				}
				// check if this is a primitive, if so parse the validations from the
				// doc comments of the slice declaration.
				if ftped, ok := fld.Type.(*ast.ArrayType); ok {
					taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
					if err != nil {
						return err
					}
					sp.taggers = append(taggers, sp.taggers...)
				}

				if err := sp.Parse(fld.Doc); err != nil {
					return err
				}

				// body fields become the schema, everything else is a header
				if in != "body" {
					seenProperties[nm] = struct{}{}
					if response.Headers == nil {
						response.Headers = make(map[string]spec.Header)
					}
					response.Headers[nm] = ps
				}
			}
		}

		// drop headers that were not (re)declared in this pass
		for k := range response.Headers {
			if _, ok := seenProperties[k]; !ok {
				delete(response.Headers, k)
			}
		}
	}

	return nil
}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/route_params.go b/vendor/github.com/go-swagger/go-swagger/scan/route_params.go
new file mode 100644
index 000000000..6dd17f6b4
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/route_params.go
@@ -0,0 +1,253 @@
+//go:build !go1.11
+// +build !go1.11
+
+package scan
+
+import (
+ "errors"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
const (
	// ParamDescriptionKey indicates the tag used to define a parameter description in swagger:route
	ParamDescriptionKey = "description"
	// ParamNameKey indicates the tag used to define a parameter name in swagger:route
	ParamNameKey = "name"
	// ParamInKey indicates the tag used to define a parameter location in swagger:route
	ParamInKey = "in"
	// ParamRequiredKey indicates the tag used to declare whether a parameter is required in swagger:route
	ParamRequiredKey = "required"
	// ParamTypeKey indicates the tag used to define the parameter type in swagger:route
	ParamTypeKey = "type"
	// ParamAllowEmptyKey indicates the tag used to indicate whether a parameter allows empty values in swagger:route
	ParamAllowEmptyKey = "allowempty"

	// SchemaMinKey indicates the tag used to indicate the minimum value allowed for this type in swagger:route
	SchemaMinKey = "min"
	// SchemaMaxKey indicates the tag used to indicate the maximum value allowed for this type in swagger:route
	SchemaMaxKey = "max"
	// SchemaEnumKey indicates the tag used to specify the allowed values for this type in swagger:route
	SchemaEnumKey = "enum"
	// SchemaFormatKey indicates the expected format for this field in swagger:route
	SchemaFormatKey = "format"
	// SchemaDefaultKey indicates the default value for this field in swagger:route
	SchemaDefaultKey = "default"
	// SchemaMinLenKey indicates the minimum length of this field in swagger:route
	SchemaMinLenKey = "minlength"
	// SchemaMaxLenKey indicates the maximum length of this field in swagger:route
	SchemaMaxLenKey = "maxlength"

	// TypeArray is the identifier for an array type in swagger:route
	TypeArray = "array"
	// TypeNumber is the identifier for a number type in swagger:route
	TypeNumber = "number"
	// TypeInteger is the identifier for an integer type in swagger:route
	TypeInteger = "integer"
	// TypeBoolean is the identifier for a boolean type in swagger:route
	TypeBoolean = "boolean"
	// TypeBool is the identifier for a boolean type in swagger:route
	TypeBool = "bool"
	// TypeObject is the identifier for an object type in swagger:route
	TypeObject = "object"
	// TypeString is the identifier for a string type in swagger:route
	TypeString = "string"
)
+
var (
	// validIn enumerates the accepted parameter locations for swagger:route parameters.
	validIn = []string{"path", "query", "header", "body", "form"}
	// basicTypes enumerates the primitive type names accepted without a model reference.
	basicTypes = []string{TypeInteger, TypeNumber, TypeString, TypeBoolean, TypeBool, TypeArray}
)
+
+func newSetParams(params []*spec.Parameter, setter func([]*spec.Parameter)) *setOpParams {
+ return &setOpParams{
+ set: setter,
+ parameters: params,
+ }
+}
+
// setOpParams parses the parameters section of a swagger:route comment
// and publishes the completed list through the set callback.
type setOpParams struct {
	set        func([]*spec.Parameter)
	parameters []*spec.Parameter
}
+
// Matches reports whether line starts a swagger parameters section.
func (s *setOpParams) Matches(line string) bool {
	return rxParameters.MatchString(line)
}
+
// Parse consumes the lines of a swagger:route parameters section. Each
// parameter starts with a "+" bullet and is described by "key: value"
// pairs; recognized keys configure the spec.Parameter directly while
// unknown keys are kept as extra data for processSchema. The finished
// list is handed to the setter.
func (s *setOpParams) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}

	var current *spec.Parameter
	var extraData map[string]string

	for _, line := range lines {
		l := strings.TrimSpace(line)

		// a "+" bullet starts a new parameter; flush the previous one
		if strings.HasPrefix(l, "+") {
			s.finalizeParam(current, extraData)
			current = new(spec.Parameter)
			extraData = make(map[string]string)
			l = strings.TrimPrefix(l, "+")
		}

		kv := strings.SplitN(l, ":", 2)

		// lines without a key/value separator carry no information
		if len(kv) <= 1 {
			continue
		}

		key := strings.ToLower(strings.TrimSpace(kv[0]))
		value := strings.TrimSpace(kv[1])

		// key/value data before the first "+" bullet is malformed
		if current == nil {
			return errors.New("invalid route/operation schema provided")
		}

		switch key {
		case ParamDescriptionKey:
			current.Description = value
		case ParamNameKey:
			current.Name = value
		case ParamInKey:
			v := strings.ToLower(value)
			if contains(validIn, v) {
				current.In = v
			}
		case ParamRequiredKey:
			if v, err := strconv.ParseBool(value); err == nil {
				current.Required = v
			}
		case ParamTypeKey:
			if current.Schema == nil {
				current.Schema = new(spec.Schema)
			}
			if contains(basicTypes, value) {
				current.Type = strings.ToLower(value)
				if current.Type == TypeBool {
					current.Type = TypeBoolean
				}
			} else {
				// not a primitive: treat the value as a model name
				if ref, err := spec.NewRef("#/definitions/" + value); err == nil {
					current.Type = TypeObject
					current.Schema.Ref = ref
				}
			}
			current.Schema.Type = spec.StringOrArray{current.Type}
		case ParamAllowEmptyKey:
			if v, err := strconv.ParseBool(value); err == nil {
				current.AllowEmptyValue = v
			}
		default:
			// anything else is schema-level metadata (min, max, enum, ...)
			extraData[key] = value
		}
	}

	// flush the last parameter and publish the accumulated list
	s.finalizeParam(current, extraData)
	s.set(s.parameters)
	return nil
}
+
+func (s *setOpParams) finalizeParam(param *spec.Parameter, data map[string]string) {
+ if param == nil {
+ return
+ }
+
+ processSchema(data, param)
+ s.parameters = append(s.parameters, param)
+}
+
// processSchema copies the extra "key: value" metadata collected for a
// route parameter onto its schema. Parameters without a schema (no type
// was declared) are skipped entirely.
func processSchema(data map[string]string, param *spec.Parameter) {
	if param.Schema == nil {
		return
	}

	var enumValues []string

	for key, value := range data {
		switch key {
		case SchemaMinKey:
			// min/max only apply to numeric types
			if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
				v, _ := strconv.ParseFloat(value, 64)
				param.Schema.Minimum = &v
			}
		case SchemaMaxKey:
			if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
				v, _ := strconv.ParseFloat(value, 64)
				param.Schema.Maximum = &v
			}
		case SchemaMinLenKey:
			// NOTE(review): min/max length are only honored for array types
			// here, although MinLength/MaxLength are string validations in
			// swagger — confirm this is intentional.
			if getType(param.Schema) == TypeArray {
				v, _ := strconv.ParseInt(value, 10, 64)
				param.Schema.MinLength = &v
			}
		case SchemaMaxLenKey:
			if getType(param.Schema) == TypeArray {
				v, _ := strconv.ParseInt(value, 10, 64)
				param.Schema.MaxLength = &v
			}
		case SchemaEnumKey:
			enumValues = strings.Split(value, ",")
		case SchemaFormatKey:
			param.Schema.Format = value
		case SchemaDefaultKey:
			param.Schema.Default = convert(param.Type, value)
		}
	}

	if param.Description != "" {
		param.Schema.Description = param.Description
	}

	convertEnum(param.Schema, enumValues)
}
+
+func convertEnum(schema *spec.Schema, enumValues []string) {
+ if len(enumValues) == 0 {
+ return
+ }
+
+ var finalEnum []interface{}
+ for _, v := range enumValues {
+ finalEnum = append(finalEnum, convert(schema.Type[0], strings.TrimSpace(v)))
+ }
+ schema.Enum = finalEnum
+}
+
+func convert(typeStr, valueStr string) interface{} {
+ switch typeStr {
+ case TypeInteger:
+ fallthrough
+ case TypeNumber:
+ if num, err := strconv.ParseFloat(valueStr, 64); err == nil {
+ return num
+ }
+ case TypeBoolean:
+ fallthrough
+ case TypeBool:
+ if b, err := strconv.ParseBool(valueStr); err == nil {
+ return b
+ }
+ }
+ return valueStr
+}
+
+func getType(schema *spec.Schema) string {
+ if len(schema.Type) == 0 {
+ return ""
+ }
+ return schema.Type[0]
+}
+
// contains reports whether obj occurs in arr.
func contains(arr []string, obj string) bool {
	for i := range arr {
		if arr[i] == obj {
			return true
		}
	}
	return false
}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/routes.go b/vendor/github.com/go-swagger/go-swagger/scan/routes.go
new file mode 100644
index 000000000..644d61900
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/routes.go
@@ -0,0 +1,146 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/go-openapi/spec"
+
+ "golang.org/x/tools/go/loader"
+)
+
+func opConsumesSetter(op *spec.Operation) func([]string) {
+ return func(consumes []string) { op.Consumes = consumes }
+}
+
+func opProducesSetter(op *spec.Operation) func([]string) {
+ return func(produces []string) { op.Produces = produces }
+}
+
+func opSchemeSetter(op *spec.Operation) func([]string) {
+ return func(schemes []string) { op.Schemes = schemes }
+}
+
+func opSecurityDefsSetter(op *spec.Operation) func([]map[string][]string) {
+ return func(securityDefs []map[string][]string) { op.Security = securityDefs }
+}
+
+func opResponsesSetter(op *spec.Operation) func(*spec.Response, map[int]spec.Response) {
+ return func(def *spec.Response, scr map[int]spec.Response) {
+ if op.Responses == nil {
+ op.Responses = new(spec.Responses)
+ }
+ op.Responses.Default = def
+ op.Responses.StatusCodeResponses = scr
+ }
+}
+
+func opParamSetter(op *spec.Operation) func([]*spec.Parameter) {
+ return func(params []*spec.Parameter) {
+ for _, v := range params {
+ op.AddParam(v)
+ }
+ }
+}
+
+func newRoutesParser(prog *loader.Program) *routesParser {
+ return &routesParser{
+ program: prog,
+ }
+}
+
// routesParser scans swagger:route comment annotations in a loaded
// program and merges the resulting operations into a spec's paths.
type routesParser struct {
	program     *loader.Program            // type-checked program being scanned
	definitions map[string]spec.Schema     // shared schema definitions, keyed by name
	operations  map[string]*spec.Operation // previously collected operations, keyed by ID
	responses   map[string]spec.Response   // shared named responses
	parameters  []*spec.Parameter          // shared parameters applied to operations
}
+
// routeVendorExtensibleParser installs x-* vendor extensions, parsed
// from a YAML block in the route's comment, onto a *spec.Operation.
var routeVendorExtensibleParser = vendorExtensibleParser{
	setExtensions: func(ext spec.Extensions, dest interface{}) {
		dest.(*spec.Operation).Extensions = ext
	},
}
+
// Parse scans every comment group in gofile for swagger:route
// annotations and merges the resulting operations into target (which
// must be a *spec.Paths). Routes rejected by the include/exclude tag
// filters are skipped.
func (rp *routesParser) Parse(gofile *ast.File, target interface{}, includeTags map[string]bool, excludeTags map[string]bool) error {
	tgt := target.(*spec.Paths)
	for _, comsec := range gofile.Comments {
		content := parsePathAnnotation(rxRoute, comsec.List)

		if content.Method == "" {
			continue // it's not, next!
		}

		if !shouldAcceptTag(content.Tags, includeTags, excludeTags) {
			if Debug {
				fmt.Printf("route %s %s is ignored due to tag rules\n", content.Method, content.Path)
			}
			continue
		}

		// reuse a previously collected operation with the same ID, if any
		pthObj := tgt.Paths[content.Path]
		op := setPathOperation(
			content.Method, content.ID,
			&pthObj, rp.operations[content.ID])

		op.Tags = content.Tags

		// wire up the section parsers that consume the remaining
		// annotation lines (consumes, produces, schemes, ...)
		sp := new(sectionedParser)
		sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
		sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
		sr := newSetResponses(rp.definitions, rp.responses, opResponsesSetter(op))
		spa := newSetParams(rp.parameters, opParamSetter(op))
		sp.taggers = []tagParser{
			newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, opConsumesSetter(op)), false),
			newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, opProducesSetter(op)), false),
			newSingleLineTagParser("Schemes", newSetSchemes(opSchemeSetter(op))),
			newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, opSecurityDefsSetter(op)), false),
			newMultiLineTagParser("Parameters", spa, false),
			newMultiLineTagParser("Responses", sr, false),
			newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, routeVendorExtensibleParser.ParseInto(op)), true),
		}
		if err := sp.Parse(content.Remaining); err != nil {
			return fmt.Errorf("operation (%s): %v", op.ID, err)
		}

		// the paths map may still be nil when the spec started out empty
		if tgt.Paths == nil {
			tgt.Paths = make(map[string]spec.PathItem)
		}
		tgt.Paths[content.Path] = pthObj
	}

	return nil
}
+
// shouldAcceptTag decides whether a route with the given tags passes
// the filters. With include filters present, at least one tag must be
// included (excludes are then ignored); otherwise any excluded tag
// rejects the route. Without include filters the route is accepted by
// default.
func shouldAcceptTag(tags []string, includeTags map[string]bool, excludeTags map[string]bool) bool {
	hasIncludes := len(includeTags) > 0
	for _, tag := range tags {
		switch {
		case hasIncludes:
			if includeTags[tag] {
				return true
			}
		case len(excludeTags) > 0:
			if excludeTags[tag] {
				return false
			}
		}
	}
	return !hasIncludes
}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/scanner.go b/vendor/github.com/go-swagger/go-swagger/scan/scanner.go
new file mode 100644
index 000000000..b07616735
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/scanner.go
@@ -0,0 +1,974 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/build"
+ goparser "go/parser"
+ "go/types"
+ "log"
+ "os"
+ "regexp"
+ "strings"
+
+ "github.com/go-openapi/loads/fmts"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+ "golang.org/x/tools/go/loader"
+ yaml "gopkg.in/yaml.v3"
+)
+
// Pattern building blocks and validation-annotation formats for the
// swagger comment scanner. Patterns use Unicode classes (\p{L}, \p{N},
// \p{Zs}, ...) so annotations also work with non-ASCII identifiers.
const (
	rxMethod = "(\\p{L}+)"
	rxPath   = "((?:/[\\p{L}\\p{N}\\p{Pd}\\p{Pc}{}\\-\\.\\?_~%!$&'()*+,;=:@/]*)+/?)"
	rxOpTags = "(\\p{L}[\\p{L}\\p{N}\\p{Pd}\\.\\p{Pc}\\p{Zs}]+)"
	rxOpID   = "((?:\\p{L}[\\p{L}\\p{N}\\p{Pd}\\p{Pc}]+)+)"

	// the *Fmt constants take a prefix (e.g. a nested items prefix)
	// substituted in via rxf
	rxMaximumFmt    = "%s[Mm]ax(?:imum)?\\p{Zs}*:\\p{Zs}*([\\<=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
	rxMinimumFmt    = "%s[Mm]in(?:imum)?\\p{Zs}*:\\p{Zs}*([\\>=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
	rxMultipleOfFmt = "%s[Mm]ultiple\\p{Zs}*[Oo]f\\p{Zs}*:\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"

	rxMaxLengthFmt        = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
	rxMinLengthFmt        = "%s[Mm]in(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
	rxPatternFmt          = "%s[Pp]attern\\p{Zs}*:\\p{Zs}*(.*)$"
	rxCollectionFormatFmt = "%s[Cc]ollection(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ff]ormat)\\p{Zs}*:\\p{Zs}*(.*)$"
	rxEnumFmt             = "%s[Ee]num\\p{Zs}*:\\p{Zs}*(.*)$"
	rxDefaultFmt          = "%s[Dd]efault\\p{Zs}*:\\p{Zs}*(.*)$"
	rxExampleFmt          = "%s[Ee]xample\\p{Zs}*:\\p{Zs}*(.*)$"

	rxMaxItemsFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
	rxMinItemsFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
	rxUniqueFmt   = "%s[Uu]nique\\p{Zs}*:\\p{Zs}*(true|false)$"

	// repeated "items." prefix for validations on nested array elements
	rxItemsPrefixFmt = "(?:[Ii]tems[\\.\\p{Zs}]*){%d}"
)
+
var (
	// swagger:* annotations recognized on declarations
	rxSwaggerAnnotation  = regexp.MustCompile(`swagger:([\p{L}\p{N}\p{Pd}\p{Pc}]+)`)
	rxFileUpload         = regexp.MustCompile(`swagger:file`)
	rxStrFmt             = regexp.MustCompile(`swagger:strfmt\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
	rxAlias              = regexp.MustCompile(`swagger:alias`)
	rxName               = regexp.MustCompile(`swagger:name\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)$`)
	rxAllOf              = regexp.MustCompile(`swagger:allOf\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)?$`)
	rxModelOverride      = regexp.MustCompile(`swagger:model\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
	rxResponseOverride   = regexp.MustCompile(`swagger:response\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
	rxParametersOverride = regexp.MustCompile(`swagger:parameters\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\p{Zs}]+)$`)
	rxEnum               = regexp.MustCompile(`swagger:enum\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
	rxIgnoreOverride     = regexp.MustCompile(`swagger:ignore\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
	rxDefault            = regexp.MustCompile(`swagger:default\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
	rxType               = regexp.MustCompile(`swagger:type\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
	// swagger:route captures method, path, optional tags and operation id
	rxRoute = regexp.MustCompile(
		"swagger:route\\p{Zs}*" +
			rxMethod +
			"\\p{Zs}*" +
			rxPath +
			"(?:\\p{Zs}+" +
			rxOpTags +
			")?\\p{Zs}+" +
			rxOpID + "\\p{Zs}*$")
	rxBeginYAMLSpec    = regexp.MustCompile(`---\p{Zs}*$`)
	rxUncommentHeaders = regexp.MustCompile(`^[\p{Zs}\t/\*-]*\|?`)
	rxUncommentYAML    = regexp.MustCompile(`^[\p{Zs}\t]*/*`)
	// swagger:operation has the same overall shape as swagger:route
	rxOperation = regexp.MustCompile(
		"swagger:operation\\p{Zs}*" +
			rxMethod +
			"\\p{Zs}*" +
			rxPath +
			"(?:\\p{Zs}+" +
			rxOpTags +
			")?\\p{Zs}+" +
			rxOpID + "\\p{Zs}*$")

	// generic text helpers used while cleaning comment lines
	rxSpace              = regexp.MustCompile(`\p{Zs}+`)
	rxIndent             = regexp.MustCompile(`\p{Zs}*/*\p{Zs}*[^\p{Zs}]`)
	rxPunctuationEnd     = regexp.MustCompile(`\p{Po}$`)
	rxStripComments      = regexp.MustCompile(`^[^\p{L}\p{N}\p{Pd}\p{Pc}\+]*`)
	rxStripTitleComments = regexp.MustCompile(`^[^\p{L}]*[Pp]ackage\p{Zs}+[^\p{Zs}]+\p{Zs}*`)
	rxAllowedExtensions  = regexp.MustCompile(`^[Xx]-`)

	// key: value annotation lines inside doc comments
	rxIn              = regexp.MustCompile(`[Ii]n\p{Zs}*:\p{Zs}*(query|path|header|body|formData)$`)
	rxRequired        = regexp.MustCompile(`[Rr]equired\p{Zs}*:\p{Zs}*(true|false)$`)
	rxDiscriminator   = regexp.MustCompile(`[Dd]iscriminator\p{Zs}*:\p{Zs}*(true|false)$`)
	rxReadOnly        = regexp.MustCompile(`[Rr]ead(?:\p{Zs}*|[\p{Pd}\p{Pc}])?[Oo]nly\p{Zs}*:\p{Zs}*(true|false)$`)
	rxConsumes        = regexp.MustCompile(`[Cc]onsumes\p{Zs}*:`)
	rxProduces        = regexp.MustCompile(`[Pp]roduces\p{Zs}*:`)
	rxSecuritySchemes = regexp.MustCompile(`[Ss]ecurity\p{Zs}*:`)
	rxSecurity        = regexp.MustCompile(`[Ss]ecurity\p{Zs}*[Dd]efinitions:`)
	rxResponses       = regexp.MustCompile(`[Rr]esponses\p{Zs}*:`)
	rxParameters      = regexp.MustCompile(`[Pp]arameters\p{Zs}*:`)
	rxSchemes         = regexp.MustCompile(`[Ss]chemes\p{Zs}*:\p{Zs}*((?:(?:https?|HTTPS?|wss?|WSS?)[\p{Zs},]*)+)$`)
	rxVersion         = regexp.MustCompile(`[Vv]ersion\p{Zs}*:\p{Zs}*(.+)$`)
	rxHost            = regexp.MustCompile(`[Hh]ost\p{Zs}*:\p{Zs}*(.+)$`)
	rxBasePath        = regexp.MustCompile(`[Bb]ase\p{Zs}*-*[Pp]ath\p{Zs}*:\p{Zs}*` + rxPath + "$")
	rxLicense         = regexp.MustCompile(`[Ll]icense\p{Zs}*:\p{Zs}*(.+)$`)
	rxContact         = regexp.MustCompile(`[Cc]ontact\p{Zs}*-?(?:[Ii]info\p{Zs}*)?:\p{Zs}*(.+)$`)
	rxTOS             = regexp.MustCompile(`[Tt](:?erms)?\p{Zs}*-?[Oo]f?\p{Zs}*-?[Ss](?:ervice)?\p{Zs}*:`)
	rxExtensions      = regexp.MustCompile(`[Ee]xtensions\p{Zs}*:`)
	rxInfoExtensions  = regexp.MustCompile(`[In]nfo\p{Zs}*[Ee]xtensions:`)
	// currently unused: rxExample = regexp.MustCompile(`[Ex]ample\p{Zs}*:\p{Zs}*(.*)$`)
)
+
+// Many thanks go to https://github.com/yvasiyarov/swagger
+// this is loosely based on that implementation but for swagger 2.0
+
// joinDropLast joins lines with newlines, discarding a final line that
// is empty or whitespace-only.
func joinDropLast(lines []string) string {
	if n := len(lines); n > 0 && strings.TrimSpace(lines[n-1]) == "" {
		lines = lines[:n-1]
	}
	return strings.Join(lines, "\n")
}
+
// removeEmptyLines filters out lines that are empty or whitespace-only.
func removeEmptyLines(lines []string) (notEmpty []string) {
	for _, line := range lines {
		if strings.TrimSpace(line) == "" {
			continue
		}
		notEmpty = append(notEmpty, line)
	}
	return
}
+
// rxf compiles the format pattern rxp with ar substituted into it.
func rxf(rxp, ar string) *regexp.Regexp {
	pattern := fmt.Sprintf(rxp, ar)
	return regexp.MustCompile(pattern)
}
+
// The Opts for the application scanner.
type Opts struct {
	BasePath    string        // import path of the application's entry package
	Input       *spec.Swagger // optional spec the scan results are merged into
	ScanModels  bool          // also scan for swagger:model annotations
	BuildTags   string        // comma-separated build tags handed to the loader
	Include     []string      // package filters for initial discovery
	Exclude     []string      // package filters excluded from initial discovery
	IncludeTags []string      // only keep routes carrying one of these tags
	ExcludeTags []string      // drop routes carrying one of these tags
}
+
+func safeConvert(str string) bool {
+ b, err := swag.ConvertBool(str)
+ if err != nil {
+ return false
+ }
+ return b
+}
+
// Debug is true when the process is run with the DEBUG=1 environment variable.
var Debug = safeConvert(os.Getenv("DEBUG"))
+
+// Application scans the application and builds a swagger spec based on the information from the code files.
+// When there are includes provided, only those files are considered for the initial discovery.
+// Similarly the excludes will exclude an item from initial discovery through scanning for annotations.
+// When something in the discovered items requires a type that is contained in the includes or excludes it will still be
+// in the spec.
+func Application(opts Opts) (*spec.Swagger, error) {
+ parser, err := newAppScanner(&opts)
+
+ if err != nil {
+ return nil, err
+ }
+ return parser.Parse()
+}
+
// appScanner the global context for scanning a go application
// into a swagger specification
type appScanner struct {
	loader      *loader.Config             // loader configuration used to parse the program
	prog        *loader.Program            // the type-checked program under scan
	classifier  *programClassifier         // assigns files to models/params/responses/routes/meta
	discovered  []schemaDecl               // schemas found while parsing, pending definition
	input       *spec.Swagger              // the spec being built (possibly seeded by the caller)
	definitions map[string]spec.Schema     // shared schema definitions, keyed by name
	responses   map[string]spec.Response   // shared named responses
	operations  map[string]*spec.Operation // operations collected so far, keyed by ID
	scanModels  bool                       // whether model files are scanned at all
	includeTags map[string]bool            // only routes carrying one of these tags are kept
	excludeTas  map[string]bool            // routes carrying one of these tags are dropped (field name is a historic typo of excludeTags)

	// MainPackage the path to find the main class in
	MainPackage string
}
+
// newAppScanner loads and type-checks the packages reachable from
// opts.BasePath and prepares an appScanner seeded with any input spec.
// It returns an error when the program cannot be loaded.
func newAppScanner(opts *Opts) (*appScanner, error) {
	if Debug {
		log.Println("scanning packages discovered through entrypoint @ ", opts.BasePath)
	}
	var ldr loader.Config
	ldr.ParserMode = goparser.ParseComments
	ldr.Import(opts.BasePath)
	if opts.BuildTags != "" {
		// NOTE: this points at (and mutates) the shared build.Default context
		ldr.Build = &build.Default
		ldr.Build.BuildTags = strings.Split(opts.BuildTags, ",")
	}
	// FakeImportC lets the type-checker skip over cgo imports
	ldr.TypeChecker = types.Config{FakeImportC: true}
	prog, err := ldr.Load()
	if err != nil {
		return nil, err
	}

	// ranging over an empty slice is a no-op, so no len() guards needed
	var includes, excludes packageFilters
	for _, include := range opts.Include {
		includes = append(includes, packageFilter{Name: include})
	}
	for _, exclude := range opts.Exclude {
		excludes = append(excludes, packageFilter{Name: exclude})
	}
	includeTags := make(map[string]bool, len(opts.IncludeTags))
	for _, includeTag := range opts.IncludeTags {
		includeTags[includeTag] = true
	}
	excludeTags := make(map[string]bool, len(opts.ExcludeTags))
	for _, excludeTag := range opts.ExcludeTags {
		excludeTags[excludeTag] = true
	}

	input := opts.Input
	if input == nil {
		input = new(spec.Swagger)
		input.Swagger = "2.0"
	}

	// make sure every top-level container exists, so later stages can
	// write into them without nil checks
	if input.Paths == nil {
		input.Paths = new(spec.Paths)
	}
	if input.Definitions == nil {
		input.Definitions = make(map[string]spec.Schema)
	}
	if input.Responses == nil {
		input.Responses = make(map[string]spec.Response)
	}
	if input.Extensions == nil {
		input.Extensions = make(spec.Extensions)
	}

	return &appScanner{
		MainPackage: opts.BasePath,
		prog:        prog,
		input:       input,
		loader:      &ldr,
		operations:  collectOperationsFromInput(input),
		definitions: input.Definitions,
		responses:   input.Responses,
		scanModels:  opts.ScanModels,
		classifier: &programClassifier{
			Includes: includes,
			Excludes: excludes,
		},
		includeTags: includeTags,
		excludeTas:  excludeTags,
	}, nil
}
+
+func collectOperationsFromInput(input *spec.Swagger) map[string]*spec.Operation {
+ operations := make(map[string]*spec.Operation)
+ if input != nil && input.Paths != nil {
+ for _, pth := range input.Paths.Paths {
+ if pth.Get != nil {
+ operations[pth.Get.ID] = pth.Get
+ }
+ if pth.Post != nil {
+ operations[pth.Post.ID] = pth.Post
+ }
+ if pth.Put != nil {
+ operations[pth.Put.ID] = pth.Put
+ }
+ if pth.Patch != nil {
+ operations[pth.Patch.ID] = pth.Patch
+ }
+ if pth.Delete != nil {
+ operations[pth.Delete.ID] = pth.Delete
+ }
+ if pth.Head != nil {
+ operations[pth.Head.ID] = pth.Head
+ }
+ if pth.Options != nil {
+ operations[pth.Options.ID] = pth.Options
+ }
+ }
+ }
+ return operations
+}
+
// Parse produces a swagger object for an application.
// The stages run in dependency order: models, parameters, responses,
// discovered definitions, routes/operations, and finally the top-level
// meta information.
func (a *appScanner) Parse() (*spec.Swagger, error) {
	// classification still includes files that are completely commented out
	cp, err := a.classifier.Classify(a.prog)
	if err != nil {
		return nil, err
	}

	// build models dictionary
	if a.scanModels {
		for _, modelsFile := range cp.Models {
			if err := a.parseSchema(modelsFile); err != nil {
				return nil, err
			}
		}
	}

	// build parameters dictionary
	for _, paramsFile := range cp.Parameters {
		if err := a.parseParameters(paramsFile); err != nil {
			return nil, err
		}
	}

	// build responses dictionary
	for _, responseFile := range cp.Responses {
		if err := a.parseResponses(responseFile); err != nil {
			return nil, err
		}
	}

	// build definitions dictionary (drains schemas discovered above)
	if err := a.processDiscovered(); err != nil {
		return nil, err
	}

	// build paths dictionary
	for _, routeFile := range cp.Routes {
		if err := a.parseRoutes(routeFile); err != nil {
			return nil, err
		}
	}
	for _, operationFile := range cp.Operations {
		if err := a.parseOperations(operationFile); err != nil {
			return nil, err
		}
	}

	// build swagger object
	for _, metaFile := range cp.Meta {
		if err := a.parseMeta(metaFile); err != nil {
			return nil, err
		}
	}

	// default the swagger version when the input spec left it unset
	if a.input.Swagger == "" {
		a.input.Swagger = "2.0"
	}

	return a.input, nil
}
+
+func (a *appScanner) processDiscovered() error {
+ // loop over discovered until all the items are in definitions
+ keepGoing := len(a.discovered) > 0
+ for keepGoing {
+ var queue []schemaDecl
+ for _, d := range a.discovered {
+ if _, ok := a.definitions[d.Name]; !ok {
+ queue = append(queue, d)
+ }
+ }
+ a.discovered = nil
+ for _, sd := range queue {
+ if err := a.parseDiscoveredSchema(sd); err != nil {
+ return err
+ }
+ }
+ keepGoing = len(a.discovered) > 0
+ }
+
+ return nil
+}
+
// parseSchema extracts model definitions from file and queues any
// schemas the schema parser discovered along the way.
func (a *appScanner) parseSchema(file *ast.File) error {
	parser := newSchemaParser(a.prog)
	err := parser.Parse(file, a.definitions)
	if err != nil {
		return err
	}
	a.discovered = append(a.discovered, parser.postDecls...)
	return nil
}
+
+func (a *appScanner) parseDiscoveredSchema(sd schemaDecl) error {
+ sp := newSchemaParser(a.prog)
+ sp.discovered = &sd
+
+ if err := sp.Parse(sd.File, a.definitions); err != nil {
+ return err
+ }
+ a.discovered = append(a.discovered, sp.postDecls...)
+ return nil
+}
+
// parseRoutes merges swagger:route annotations from file into the
// spec's paths, honoring the configured tag filters.
func (a *appScanner) parseRoutes(file *ast.File) error {
	parser := newRoutesParser(a.prog)
	parser.operations = a.operations
	parser.definitions = a.definitions
	parser.responses = a.responses
	return parser.Parse(file, a.input.Paths, a.includeTags, a.excludeTas)
}
+
// parseOperations merges swagger:operation annotations from file into
// the spec's paths, honoring the configured tag filters.
func (a *appScanner) parseOperations(file *ast.File) error {
	parser := newOperationsParser(a.prog)
	parser.operations = a.operations
	parser.definitions = a.definitions
	parser.responses = a.responses
	return parser.Parse(file, a.input.Paths, a.includeTags, a.excludeTas)
}
+
// parseParameters extracts swagger:parameters declarations from file
// and attaches them to the matching operations.
func (a *appScanner) parseParameters(file *ast.File) error {
	parser := newParameterParser(a.prog)
	err := parser.Parse(file, a.operations)
	if err != nil {
		return err
	}
	// queue schemas discovered both by the parameter parser itself and
	// by its embedded schema parser
	a.discovered = append(a.discovered, parser.postDecls...)
	a.discovered = append(a.discovered, parser.scp.postDecls...)
	return nil
}
+
// parseResponses extracts swagger:response declarations from file into
// the shared responses dictionary.
func (a *appScanner) parseResponses(file *ast.File) error {
	parser := newResponseParser(a.prog)
	err := parser.Parse(file, a.responses)
	if err != nil {
		return err
	}
	// queue schemas discovered both by the response parser itself and
	// by its embedded schema parser
	a.discovered = append(a.discovered, parser.postDecls...)
	a.discovered = append(a.discovered, parser.scp.postDecls...)
	return nil
}
+
// parseMeta feeds the file's package documentation to the meta parser,
// which fills in the spec's top-level information.
func (a *appScanner) parseMeta(file *ast.File) error {
	mp := newMetaParser(a.input)
	return mp.Parse(file.Doc)
}
+
// MustExpandPackagePath resolves packagePath to its directory on disk,
// terminating the process when it cannot be found in the Go search path.
func (a *appScanner) MustExpandPackagePath(packagePath string) string {
	dir := swag.FindInGoSearchPath(packagePath)
	if dir == "" {
		log.Fatalf("Can't find package %s \n", packagePath)
	}
	return dir
}
+
// swaggerTypable is the minimal surface shared by schema-like targets
// that type information is written to while scanning.
type swaggerTypable interface {
	Typed(string, string)    // record the swagger type and format
	SetRef(spec.Ref)         // point at a named definition instead of an inline type
	Items() swaggerTypable   // typable for the array element schema
	WithEnum(...interface{}) // record enumerated values
	Schema() *spec.Schema    // underlying schema, if any
	Level() int              // nesting depth (items within items)
}
+
// Map all Go builtin types that have Json representation to Swagger/Json types.
// See https://golang.org/pkg/builtin/ and http://swagger.io/specification/
// Returns an error for builtins with no JSON marshalling (complex numbers)
// and for unknown type names.
func swaggerSchemaForType(typeName string, prop swaggerTypable) error {
	switch typeName {
	case "bool":
		prop.Typed("boolean", "")
	case "byte", "uint8": // byte is an alias of uint8
		prop.Typed("integer", "uint8")
	case "complex128", "complex64":
		return fmt.Errorf("unsupported builtin %q (no JSON marshaller)", typeName)
	case "error":
		// TODO: error is often marshalled into a string but not always (e.g. errors package creates
		// errors that are marshalled into an empty object), this could be handled the same way
		// custom JSON marshallers are handled (in future)
		prop.Typed("string", "")
	case "float32":
		prop.Typed("number", "float")
	case "float64":
		prop.Typed("number", "double")
	case "int", "int64":
		prop.Typed("integer", "int64")
	case "int16":
		prop.Typed("integer", "int16")
	case "int32", "rune": // rune is an alias of int32
		prop.Typed("integer", "int32")
	case "int8":
		prop.Typed("integer", "int8")
	case "string":
		prop.Typed("string", "")
	case "uint", "uint64", "uintptr":
		prop.Typed("integer", "uint64")
	case "uint16":
		prop.Typed("integer", "uint16")
	case "uint32":
		prop.Typed("integer", "uint32")
	default:
		return fmt.Errorf("unsupported type %q", typeName)
	}
	return nil
}
+
+func newMultiLineTagParser(name string, parser valueParser, skipCleanUp bool) tagParser {
+ return tagParser{
+ Name: name,
+ MultiLine: true,
+ SkipCleanUp: skipCleanUp,
+ Parser: parser,
+ }
+}
+
+func newSingleLineTagParser(name string, parser valueParser) tagParser {
+ return tagParser{
+ Name: name,
+ MultiLine: false,
+ SkipCleanUp: false,
+ Parser: parser,
+ }
+}
+
// tagParser pairs a named section of annotation lines with the value
// parser that interprets them.
type tagParser struct {
	Name        string      // section name, used as the key in sectionedParser.matched
	MultiLine   bool        // section keeps collecting lines until the next tag
	SkipCleanUp bool        // hand lines to the parser without uncommenting them
	Lines       []string    // lines collected for this section
	Parser      valueParser // parser that interprets the collected lines
}
+
// Matches reports whether line opens this tag's section; the decision
// is delegated to the wrapped value parser.
func (st *tagParser) Matches(line string) bool {
	return st.Parser.Matches(line)
}

// Parse hands the collected section lines to the wrapped value parser.
func (st *tagParser) Parse(lines []string) error {
	return st.Parser.Parse(lines)
}
+
// newYamlParser builds a value parser that matches lines against rx
// and hands the parsed YAML block, converted to JSON, to setter.
func newYamlParser(rx *regexp.Regexp, setter func(json.RawMessage) error) valueParser {
	p := yamlParser{rx: rx, set: setter}
	return &p
}
+
// yamlParser parses a YAML block out of comment lines and forwards the
// JSON-converted document to set.
type yamlParser struct {
	set func(json.RawMessage) error // receives the converted document
	rx  *regexp.Regexp              // matches the line that opens the block
}
+
// Parse interprets the collected comment lines as a YAML document and
// forwards it, converted to raw JSON, to the configured setter.
// Empty input is accepted and ignored.
func (y *yamlParser) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}

	// strip the comment indentation before handing the text to the YAML
	// parser (previously copied through a redundant append)
	uncommented := removeYamlIndent(lines)

	yamlContent := strings.Join(uncommented, "\n")
	var yamlValue interface{}
	if err := yaml.Unmarshal([]byte(yamlContent), &yamlValue); err != nil {
		return err
	}

	jsonValue, err := fmts.YAMLToJSON(yamlValue)
	if err != nil {
		return err
	}

	return y.set(jsonValue)
}
+
// Matches reports whether line opens this parser's YAML block.
func (y *yamlParser) Matches(line string) bool {
	return y.rx.MatchString(line)
}
+
// aggregates lines in header until it sees `---`,
// the beginning of a YAML spec
type yamlSpecScanner struct {
	header         []string       // plain comment lines seen before the YAML spec
	yamlSpec       []string       // buffered YAML lines, starting with the `---`
	setTitle       func([]string) // optional receiver for the derived title
	setDescription func([]string) // optional receiver for the remaining header
	workedOutTitle bool           // title/description split already performed
	title          []string       // cached title lines
	skipHeader     bool           // discard header lines instead of keeping them
}
+
// cleanupScannerLines strips comment markers (via ur) from lines and
// trims leading/trailing blank lines. When yamlBlock is non-nil, text
// between two yamlBlock delimiter lines is passed through verbatim
// (minus its indentation) instead of being uncommented.
func cleanupScannerLines(lines []string, ur *regexp.Regexp, yamlBlock *regexp.Regexp) []string {
	// bail early when there is nothing to parse
	if len(lines) == 0 {
		return lines
	}
	seenLine := -1      // index of the first content line, -1 while none seen
	var lastContent int // index of the last content line
	var uncommented []string
	var startBlock bool // true while inside a yamlBlock-delimited region
	var yaml []string
	for i, v := range lines {
		if yamlBlock != nil && yamlBlock.MatchString(v) && !startBlock {
			// opening delimiter: start collecting the block verbatim
			startBlock = true
			if seenLine < 0 {
				seenLine = i
			}
			continue
		}
		if startBlock {
			if yamlBlock.MatchString(v) {
				// closing delimiter: flush the collected block
				startBlock = false
				uncommented = append(uncommented, removeIndent(yaml)...)
				continue
			}
			yaml = append(yaml, v)
			if v != "" {
				if seenLine < 0 {
					seenLine = i
				}
				lastContent = i
			}
			continue
		}
		// regular line: strip the comment prefix
		str := ur.ReplaceAllString(v, "")
		uncommented = append(uncommented, str)
		if str != "" {
			if seenLine < 0 {
				seenLine = i
			}
			lastContent = i
		}
	}

	// fixes issue #50
	if seenLine == -1 {
		return nil
	}
	return uncommented[seenLine : lastContent+1]
}
+
// a shared function that can be used to split given headers
// into a title and description
//
// The title is everything up to the first blank line; when there is no
// blank separator, a punctuation-terminated first line becomes the
// title, otherwise everything is treated as description.
func collectScannerTitleDescription(headers []string) (title, desc []string) {
	hdrs := cleanupScannerLines(headers, rxUncommentHeaders, nil)

	// look for the first blank line separating title from description
	idx := -1
	for i, line := range hdrs {
		if strings.TrimSpace(line) == "" {
			idx = i
			break
		}
	}

	if idx > -1 {
		title = hdrs[:idx]
		if len(hdrs) > idx+1 {
			desc = hdrs[idx+1:]
		} else {
			desc = nil
		}
		return
	}

	// no blank separator: a punctuation-terminated first line is a title
	if len(hdrs) > 0 {
		line := hdrs[0]
		if rxPunctuationEnd.MatchString(line) {
			title = []string{line}
			desc = hdrs[1:]
		} else {
			desc = hdrs
		}
	}

	return
}
+
+func (sp *yamlSpecScanner) collectTitleDescription() {
+ if sp.workedOutTitle {
+ return
+ }
+ if sp.setTitle == nil {
+ sp.header = cleanupScannerLines(sp.header, rxUncommentHeaders, nil)
+ return
+ }
+
+ sp.workedOutTitle = true
+ sp.title, sp.header = collectScannerTitleDescription(sp.header)
+}
+
// Title returns the derived title lines, computing the split lazily.
func (sp *yamlSpecScanner) Title() []string {
	sp.collectTitleDescription()
	return sp.title
}

// Description returns the header lines remaining after the title split.
func (sp *yamlSpecScanner) Description() []string {
	sp.collectTitleDescription()
	return sp.header
}
+
// Parse walks the comment group, collecting header lines until a line
// of `---` starts the embedded YAML spec; everything from that line on
// is buffered in yamlSpec. A new swagger: annotation ends this parser.
func (sp *yamlSpecScanner) Parse(doc *ast.CommentGroup) error {
	if doc == nil {
		return nil
	}
	var startedYAMLSpec bool
COMMENTS:
	for _, c := range doc.List {
		for _, line := range strings.Split(c.Text, "\n") {
			if rxSwaggerAnnotation.MatchString(line) {
				break COMMENTS // a new swagger: annotation terminates this parser
			}

			if !startedYAMLSpec {
				if rxBeginYAMLSpec.MatchString(line) {
					// the `---` marker opens the YAML spec
					startedYAMLSpec = true
					sp.yamlSpec = append(sp.yamlSpec, line)
					continue
				}

				if !sp.skipHeader {
					sp.header = append(sp.header, line)
				}

				// no YAML spec yet, moving on
				continue
			}

			sp.yamlSpec = append(sp.yamlSpec, line)
		}
	}
	if sp.setTitle != nil {
		sp.setTitle(sp.Title())
	}
	if sp.setDescription != nil {
		sp.setDescription(sp.Description())
	}
	return nil
}
+
// UnmarshalSpec parses the collected YAML spec lines, converts them to
// JSON and hands the raw bytes to u. The buffered lines are dropped
// once successfully consumed.
func (sp *yamlSpecScanner) UnmarshalSpec(u func([]byte) error) (err error) {
	// the local was previously named "spec", shadowing the imported
	// spec package; renamed to avoid the shadowing
	specLines := cleanupScannerLines(sp.yamlSpec, rxUncommentYAML, nil)
	if len(specLines) == 0 {
		return errors.New("no spec available to unmarshal")
	}

	if !strings.Contains(specLines[0], "---") {
		return errors.New("yaml spec has to start with `---`")
	}

	// remove indentation
	specLines = removeIndent(specLines)

	// 1. parse yaml lines
	yamlValue := make(map[interface{}]interface{})

	yamlContent := strings.Join(specLines, "\n")
	err = yaml.Unmarshal([]byte(yamlContent), &yamlValue)
	if err != nil {
		return
	}

	// 2. convert to json
	var jsonValue json.RawMessage
	jsonValue, err = fmts.YAMLToJSON(yamlValue)
	if err != nil {
		return
	}

	// 3. unmarshal the json into an interface
	var data []byte
	data, err = jsonValue.MarshalJSON()
	if err != nil {
		return
	}
	err = u(data)
	if err != nil {
		return
	}

	// all parsed, returning...
	sp.yamlSpec = nil // spec is now consumed, so let's erase the parsed lines
	return
}
+
+// removes indent base on the first line
+func removeIndent(spec []string) []string {
+ loc := rxIndent.FindStringIndex(spec[0])
+ if loc[1] > 0 {
+ for i := range spec {
+ if len(spec[i]) >= loc[1] {
+ spec[i] = spec[i][loc[1]-1:]
+ }
+ }
+ }
+ return spec
+}
+
+// removes indent base on the first line
+func removeYamlIndent(spec []string) []string {
+ loc := rxIndent.FindStringIndex(spec[0])
+ var s []string
+ if loc[1] > 0 {
+ for i := range spec {
+ if len(spec[i]) >= loc[1] {
+ s = append(s, spec[i][loc[1]-1:])
+ }
+ }
+ }
+ return s
+}
+
// aggregates lines in header until it sees a tag.
type sectionedParser struct {
	header     []string             // leading lines before the first tag
	matched    map[string]tagParser // collected sections, keyed by tagger name
	annotation valueParser          // parser for the block's own swagger: annotation

	seenTag        bool           // a tag section has been entered
	skipHeader     bool           // discard header lines instead of keeping them
	setTitle       func([]string) // optional receiver for the derived title
	setDescription func([]string) // optional receiver for the description
	workedOutTitle bool           // title/description split already performed
	taggers        []tagParser    // recognized section parsers
	currentTagger  *tagParser     // section currently collecting lines, nil between sections
	title          []string       // cached title lines
	ignored        bool           // set when a swagger:ignore annotation was seen
}
+
+func (st *sectionedParser) collectTitleDescription() {
+ if st.workedOutTitle {
+ return
+ }
+ if st.setTitle == nil {
+ st.header = cleanupScannerLines(st.header, rxUncommentHeaders, nil)
+ return
+ }
+
+ st.workedOutTitle = true
+ st.title, st.header = collectScannerTitleDescription(st.header)
+}
+
// Title returns the derived title lines, computing the split lazily.
func (st *sectionedParser) Title() []string {
	st.collectTitleDescription()
	return st.title
}

// Description returns the header lines remaining after the title split.
func (st *sectionedParser) Description() []string {
	st.collectTitleDescription()
	return st.header
}
+
// Parse consumes the comment group line by line: leading lines become
// the header (title/description), a line matching one of the taggers
// opens that tagger's section, and the collected section lines are
// handed to each tagger's parser at the end. A foreign swagger:
// annotation — or swagger:ignore — terminates parsing.
func (st *sectionedParser) Parse(doc *ast.CommentGroup) error {
	if doc == nil {
		return nil
	}
COMMENTS:
	for _, c := range doc.List {
		for _, line := range strings.Split(c.Text, "\n") {
			if rxSwaggerAnnotation.MatchString(line) {
				if rxIgnoreOverride.MatchString(line) {
					st.ignored = true
					break COMMENTS // an explicit ignore terminates this parser
				}
				if st.annotation == nil || !st.annotation.Matches(line) {
					break COMMENTS // a new swagger: annotation terminates this parser
				}

				_ = st.annotation.Parse([]string{line})
				if len(st.header) > 0 {
					st.seenTag = true
				}
				continue
			}

			var matched bool
			for _, tagger := range st.taggers {
				if tagger.Matches(line) {
					st.seenTag = true
					// taking &tagger is safe: the loop breaks right
					// after, so the variable is not re-assigned
					st.currentTagger = &tagger
					matched = true
					break
				}
			}

			if st.currentTagger == nil {
				if !st.skipHeader && !st.seenTag {
					st.header = append(st.header, line)
				}
				// didn't match a tag, moving on
				continue
			}

			if st.currentTagger.MultiLine && matched {
				// the first line of a multiline tagger doesn't count
				continue
			}

			// accumulate the line under the current tagger's section
			ts, ok := st.matched[st.currentTagger.Name]
			if !ok {
				ts = *st.currentTagger
			}
			ts.Lines = append(ts.Lines, line)
			if st.matched == nil {
				st.matched = make(map[string]tagParser)
			}
			st.matched[st.currentTagger.Name] = ts

			if !st.currentTagger.MultiLine {
				// single-line sections close immediately
				st.currentTagger = nil
			}
		}
	}
	if st.setTitle != nil {
		st.setTitle(st.Title())
	}
	if st.setDescription != nil {
		st.setDescription(st.Description())
	}
	// hand every collected section to its parser
	for _, mt := range st.matched {
		if !mt.SkipCleanUp {
			mt.Lines = cleanupScannerLines(mt.Lines, rxUncommentHeaders, nil)
		}
		if err := mt.Parse(mt.Lines); err != nil {
			return err
		}
	}
	return nil
}
+
// vendorExtensibleParser installs parsed x-* vendor extensions onto an
// arbitrary destination via setExtensions.
type vendorExtensibleParser struct {
	setExtensions func(ext spec.Extensions, dest interface{})
}
+
// ParseInto returns a callback that decodes a raw JSON object into
// vendor extensions on dest, rejecting any key without the mandatory
// x- prefix.
func (extParser vendorExtensibleParser) ParseInto(dest interface{}) func(json.RawMessage) error {
	return func(jsonValue json.RawMessage) error {
		var exts spec.Extensions
		if err := json.Unmarshal(jsonValue, &exts); err != nil {
			return err
		}
		for name := range exts {
			if !rxAllowedExtensions.MatchString(name) {
				return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", name)
			}
		}
		extParser.setExtensions(exts, dest)
		return nil
	}
}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/schema.go b/vendor/github.com/go-swagger/go-swagger/scan/schema.go
new file mode 100644
index 000000000..37ce6cf25
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/schema.go
@@ -0,0 +1,1358 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "fmt"
+ "go/ast"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "reflect"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/loader"
+
+ "github.com/go-openapi/spec"
+)
+
+func addExtension(ve *spec.VendorExtensible, key string, value interface{}) {
+ if os.Getenv("SWAGGER_GENERATE_EXTENSION") == "false" {
+ return
+ }
+
+ ve.AddExtension(key, value)
+}
+
// schemaTypable adapts a *spec.Schema to the swaggerTypable interface.
// level records how deeply nested this schema is (items / additional
// properties increase it by one per hop).
type schemaTypable struct {
	schema *spec.Schema
	level  int
}
+
// Typed sets the type and format on the underlying schema.
func (st schemaTypable) Typed(tpe, format string) {
	st.schema.Typed(tpe, format)
}

// SetRef sets a $ref on the underlying schema.
func (st schemaTypable) SetRef(ref spec.Ref) {
	st.schema.Ref = ref
}

// Schema exposes the underlying schema.
func (st schemaTypable) Schema() *spec.Schema {
	return st.schema
}
+
+func (st schemaTypable) Items() swaggerTypable {
+ if st.schema.Items == nil {
+ st.schema.Items = new(spec.SchemaOrArray)
+ }
+ if st.schema.Items.Schema == nil {
+ st.schema.Items.Schema = new(spec.Schema)
+ }
+
+ st.schema.Typed("array", "")
+ return schemaTypable{st.schema.Items.Schema, st.level + 1}
+}
+
+func (st schemaTypable) AdditionalProperties() swaggerTypable {
+ if st.schema.AdditionalProperties == nil {
+ st.schema.AdditionalProperties = new(spec.SchemaOrBool)
+ }
+ if st.schema.AdditionalProperties.Schema == nil {
+ st.schema.AdditionalProperties.Schema = new(spec.Schema)
+ }
+
+ st.schema.Typed("object", "")
+ return schemaTypable{st.schema.AdditionalProperties.Schema, st.level + 1}
+}
+
// Level reports the nesting depth of this typable.
func (st schemaTypable) Level() int { return st.level }

// WithEnum sets the enum values on the underlying schema.
func (st schemaTypable) WithEnum(values ...interface{}) {
	st.schema.WithEnum(values...)
}
+
// schemaValidations applies parsed validation tag values onto a
// *spec.Schema. It satisfies the setter interfaces used by the tag parsers
// in createParser.
type schemaValidations struct {
	current *spec.Schema
}

// SetMaximum sets the maximum and whether it is exclusive.
func (sv schemaValidations) SetMaximum(val float64, exclusive bool) {
	sv.current.Maximum = &val
	sv.current.ExclusiveMaximum = exclusive
}

// SetMinimum sets the minimum and whether it is exclusive.
func (sv schemaValidations) SetMinimum(val float64, exclusive bool) {
	sv.current.Minimum = &val
	sv.current.ExclusiveMinimum = exclusive
}
func (sv schemaValidations) SetMultipleOf(val float64)  { sv.current.MultipleOf = &val }
func (sv schemaValidations) SetMinItems(val int64)      { sv.current.MinItems = &val }
func (sv schemaValidations) SetMaxItems(val int64)      { sv.current.MaxItems = &val }
func (sv schemaValidations) SetMinLength(val int64)     { sv.current.MinLength = &val }
func (sv schemaValidations) SetMaxLength(val int64)     { sv.current.MaxLength = &val }
func (sv schemaValidations) SetPattern(val string)      { sv.current.Pattern = val }
func (sv schemaValidations) SetUnique(val bool)         { sv.current.UniqueItems = val }
func (sv schemaValidations) SetDefault(val interface{}) { sv.current.Default = val }
func (sv schemaValidations) SetExample(val interface{}) { sv.current.Example = val }

// SetEnum parses an enum expression into typed values on the schema.
// NOTE(review): this indexes sv.current.Type[0] and will panic if no type
// has been set on the schema yet — confirm callers always type the schema
// before the enum tag is parsed.
func (sv schemaValidations) SetEnum(val string) {
	sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Format: sv.current.Format, Type: sv.current.Type[0]})
}
+
// schemaDecl ties a type declaration to the file and GenDecl it came from,
// together with its go name and the (possibly annotation-overridden)
// swagger model name.
type schemaDecl struct {
	File      *ast.File
	Decl      *ast.GenDecl
	TypeSpec  *ast.TypeSpec
	GoName    string
	Name      string
	annotated bool // true once a swagger:model annotation was seen in the doc comment
}
+
+func newSchemaDecl(file *ast.File, decl *ast.GenDecl, ts *ast.TypeSpec) *schemaDecl {
+ sd := &schemaDecl{
+ File: file,
+ Decl: decl,
+ TypeSpec: ts,
+ }
+ sd.inferNames()
+ return sd
+}
+
// hasAnnotation reports whether the declaration carries a swagger:model
// annotation; it re-runs inferNames to make sure the doc comment has been
// scanned.
func (sd *schemaDecl) hasAnnotation() bool {
	sd.inferNames()
	return sd.annotated
}
+
+func (sd *schemaDecl) inferNames() (goName string, name string) {
+ if sd.GoName != "" {
+ goName, name = sd.GoName, sd.Name
+ return
+ }
+ goName = sd.TypeSpec.Name.Name
+ name = goName
+ if sd.Decl.Doc != nil {
+ DECLS:
+ for _, cmt := range sd.Decl.Doc.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxModelOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ sd.annotated = true
+ }
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ name = matches[1]
+ break DECLS
+ }
+ }
+ }
+ }
+ sd.GoName = goName
+ sd.Name = name
+ return
+}
+
// schemaParser walks type declarations in a loaded program and produces
// swagger schemas for annotated models.
type schemaParser struct {
	program    *loader.Program
	postDecls  []schemaDecl           // referenced declarations discovered while parsing, to be processed afterwards
	known      map[string]spec.Schema // initialized by newSchemaParser
	discovered *schemaDecl // when set, parseDecl only processes this one declaration
}
+
+func newSchemaParser(prog *loader.Program) *schemaParser {
+ scp := new(schemaParser)
+ scp.program = prog
+ scp.known = make(map[string]spec.Schema)
+ return scp
+}
+
+func (scp *schemaParser) Parse(gofile *ast.File, target interface{}) error {
+ tgt := target.(map[string]spec.Schema)
+ for _, decl := range gofile.Decls {
+ gd, ok := decl.(*ast.GenDecl)
+ if !ok {
+ continue
+ }
+ for _, spc := range gd.Specs {
+ if ts, ok := spc.(*ast.TypeSpec); ok {
+ sd := newSchemaDecl(gofile, gd, ts)
+ if err := scp.parseDecl(tgt, sd); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ return nil
+}
+
// parseDecl converts a single type declaration into a swagger schema and
// stores it in definitions under its swagger model name. It dispatches on
// the declared type's AST shape (struct, interface, ident, selector, array,
// map) and records x-go-name / x-go-package vendor extensions for non-$ref
// schemas.
func (scp *schemaParser) parseDecl(definitions map[string]spec.Schema, decl *schemaDecl) error {
	// check if there is a swagger:model tag that is followed by a word,
	// this word is the type name for swagger
	// the package and type are recorded in the extensions
	// once type name is found convert it to a schema, by looking up the schema in the
	// definitions dictionary that got passed into this parse method

	// if our schemaParser is parsing a discovered schemaDecl and it does not match
	// the current schemaDecl we can skip parsing.
	if scp.discovered != nil && scp.discovered.Name != decl.Name {
		return nil
	}

	decl.inferNames()
	schema := definitions[decl.Name]
	schPtr := &schema

	// analyze doc comment for the model
	sp := new(sectionedParser)
	sp.setTitle = func(lines []string) { schema.Title = joinDropLast(lines) }
	sp.setDescription = func(lines []string) { schema.Description = joinDropLast(lines) }
	if err := sp.Parse(decl.Decl.Doc); err != nil {
		return err
	}

	// if the type is marked to ignore, just return
	if sp.ignored {
		return nil
	}

	// analyze struct body for fields etc
	// each exported struct field:
	// * gets a type mapped to a go primitive
	// * perhaps gets a format
	// * has to document the validations that apply for the type and the field
	// * when the struct field points to a model it becomes a ref: #/definitions/ModelName
	// * the first line of the comment is the title
	// * the following lines are the description
	switch tpe := decl.TypeSpec.Type.(type) {
	case *ast.StructType:
		if err := scp.parseStructType(decl.File, schPtr, tpe, make(map[string]string)); err != nil {
			return err
		}
	case *ast.InterfaceType:
		if err := scp.parseInterfaceType(decl.File, schPtr, tpe, make(map[string]string)); err != nil {
			return err
		}
	case *ast.Ident:
		prop := &schemaTypable{schPtr, 0}
		if strfmtName, ok := strfmtName(decl.Decl.Doc); ok {
			// swagger:strfmt wins over the named type
			prop.Typed("string", strfmtName)
		} else {
			if err := scp.parseNamedType(decl.File, tpe, prop); err != nil {
				return err
			}
		}
		if enumName, ok := enumName(decl.Decl.Doc); ok {
			var enumValues = getEnumValues(decl.File, enumName)
			if len(enumValues) > 0 {
				var typeName = reflect.TypeOf(enumValues[0]).String()
				prop.WithEnum(enumValues...)

				err := swaggerSchemaForType(typeName, prop)
				if err != nil {
					return fmt.Errorf("file %s, error is: %v", decl.File.Name, err)
				}
			}
		}
	case *ast.SelectorExpr:
		prop := &schemaTypable{schPtr, 0}
		if strfmtName, ok := strfmtName(decl.Decl.Doc); ok {
			prop.Typed("string", strfmtName)
		} else {
			if err := scp.parseNamedType(decl.File, tpe, prop); err != nil {
				return err
			}
		}

	case *ast.ArrayType:
		// NOTE(review): prop is only used on the strfmt branch; the else
		// branch constructs an equivalent fresh schemaTypable.
		prop := &schemaTypable{schPtr, 0}
		if strfmtName, ok := strfmtName(decl.Decl.Doc); ok {
			prop.Items().Typed("string", strfmtName)
		} else {
			if err := scp.parseNamedType(decl.File, tpe, &schemaTypable{schPtr, 0}); err != nil {
				return err
			}
		}

	case *ast.MapType:
		prop := &schemaTypable{schPtr, 0}
		if strfmtName, ok := strfmtName(decl.Decl.Doc); ok {
			prop.AdditionalProperties().Typed("string", strfmtName)
		} else {
			if err := scp.parseNamedType(decl.File, tpe, &schemaTypable{schPtr, 0}); err != nil {
				return err
			}
		}
	default:
		log.Printf("WARNING: Missing parser for a %T, skipping model: %s\n", tpe, decl.Name)
		return nil
	}

	if schPtr.Ref.String() == "" {
		// record go name and package as vendor extensions on non-$ref schemas
		if decl.Name != decl.GoName {
			addExtension(&schPtr.VendorExtensible, "x-go-name", decl.GoName)
		}
		for _, pkgInfo := range scp.program.AllPackages {
			if pkgInfo.Importable {
				for _, fil := range pkgInfo.Files {
					if fil.Pos() == decl.File.Pos() {
						addExtension(&schPtr.VendorExtensible, "x-go-package", pkgInfo.Pkg.Path())
					}
				}
			}
		}
	}
	definitions[decl.Name] = schema
	return nil
}
+
// parseNamedType resolves an arbitrary type expression into prop: idents
// are looked up in their package, pointers/slices recurse, inline structs
// are parsed in place, string-keyed maps become additionalProperties, and
// interfaces become untyped objects. Any other expression is an error.
func (scp *schemaParser) parseNamedType(gofile *ast.File, expr ast.Expr, prop swaggerTypable) error {
	switch ftpe := expr.(type) {
	case *ast.Ident: // simple value
		pkg, err := scp.packageForFile(gofile, ftpe)
		if err != nil {
			return err
		}
		return scp.parseIdentProperty(pkg, ftpe, prop)

	case *ast.StarExpr: // pointer to something, optional by default
		if err := scp.parseNamedType(gofile, ftpe.X, prop); err != nil {
			return err
		}

	case *ast.ArrayType: // slice type
		if err := scp.parseNamedType(gofile, ftpe.Elt, prop.Items()); err != nil {
			return err
		}

	case *ast.StructType:
		schema := prop.Schema()
		if schema == nil {
			return fmt.Errorf("items doesn't support embedded structs")
		}
		return scp.parseStructType(gofile, prop.Schema(), ftpe, make(map[string]string))

	case *ast.SelectorExpr:
		err := scp.typeForSelector(gofile, ftpe, prop)
		return err

	case *ast.MapType:
		// check if key is a string type, if not print a message
		// and skip the map property. Only maps with string keys can go into additional properties
		sch := prop.Schema()
		if sch == nil {
			return fmt.Errorf("items doesn't support maps")
		}
		if keyIdent, ok := ftpe.Key.(*ast.Ident); sch != nil && ok {
			if keyIdent.Name == "string" {
				if sch.AdditionalProperties == nil {
					sch.AdditionalProperties = new(spec.SchemaOrBool)
				}
				sch.AdditionalProperties.Allows = false
				if sch.AdditionalProperties.Schema == nil {
					sch.AdditionalProperties.Schema = new(spec.Schema)
				}
				if err := scp.parseNamedType(gofile, ftpe.Value, schemaTypable{sch.AdditionalProperties.Schema, 0}); err != nil {
					return err
				}
				sch.Typed("object", "")
			}
		}

	case *ast.InterfaceType:
		prop.Schema().Typed("object", "")
	default:
		// build the best position string we can for the error message
		pos := "unknown file:unknown position"
		if scp != nil {
			if scp.program != nil {
				if scp.program.Fset != nil {
					pos = scp.program.Fset.Position(expr.Pos()).String()
				}
			}
		}
		return fmt.Errorf("expr (%s) is unsupported for a schema", pos)
	}
	return nil
}
+
// parseEmbeddedType inlines the fields of an embedded type (ident, selector
// or pointer thereof) into schema by locating the type's source declaration
// and parsing its struct or interface body. The trailing error is reached
// when a selector resolves to a type that is neither a struct nor an
// interface.
func (scp *schemaParser) parseEmbeddedType(gofile *ast.File, schema *spec.Schema, expr ast.Expr, seenPreviously map[string]string) error {
	switch tpe := expr.(type) {
	case *ast.Ident:
		// do lookup of type
		// take primitives into account, they should result in an error for swagger
		pkg, err := scp.packageForFile(gofile, tpe)
		if err != nil {
			return err
		}
		file, _, ts, err := findSourceFile(pkg, tpe.Name)
		if err != nil {
			return err
		}

		switch st := ts.Type.(type) {
		case *ast.StructType:
			return scp.parseStructType(file, schema, st, seenPreviously)
		case *ast.InterfaceType:
			return scp.parseInterfaceType(file, schema, st, seenPreviously)
		default:
			prop := &schemaTypable{schema, 0}
			return scp.parseNamedType(gofile, st, prop)
		}

	case *ast.SelectorExpr:
		// look up package, file and then type
		pkg, err := scp.packageForSelector(gofile, tpe.X)
		if err != nil {
			return fmt.Errorf("embedded struct: %v", err)
		}
		file, _, ts, err := findSourceFile(pkg, tpe.Sel.Name)
		if err != nil {
			return fmt.Errorf("embedded struct: %v", err)
		}
		if st, ok := ts.Type.(*ast.StructType); ok {
			return scp.parseStructType(file, schema, st, seenPreviously)
		}
		if st, ok := ts.Type.(*ast.InterfaceType); ok {
			return scp.parseInterfaceType(file, schema, st, seenPreviously)
		}
	case *ast.StarExpr:
		// pointer embedding: unwrap and recurse
		return scp.parseEmbeddedType(gofile, schema, tpe.X, seenPreviously)
	default:
		return fmt.Errorf(
			"parseEmbeddedType: unsupported type %v at position %#v",
			expr,
			scp.program.Fset.Position(tpe.Pos()),
		)
	}
	return fmt.Errorf("unable to resolve embedded struct for: %v", expr)
}
+
// parseAllOfMember handles one member of an allOf composition. If the
// member type carries a swagger:model annotation (and is not time.Time),
// it becomes a $ref to its definition and is queued on postDecls;
// otherwise its struct/interface body is parsed inline into schema.
func (scp *schemaParser) parseAllOfMember(gofile *ast.File, schema *spec.Schema, expr ast.Expr, seenPreviously map[string]string) error {
	// TODO: check if struct is annotated with swagger:model or known in the definitions otherwise
	var pkg *loader.PackageInfo
	var file *ast.File
	var gd *ast.GenDecl
	var ts *ast.TypeSpec
	var err error

	switch tpe := expr.(type) {
	case *ast.Ident:
		// do lookup of type
		// take primitives into account, they should result in an error for swagger
		pkg, err = scp.packageForFile(gofile, tpe)
		if err != nil {
			return err
		}
		file, gd, ts, err = findSourceFile(pkg, tpe.Name)
		if err != nil {
			return err
		}

	case *ast.SelectorExpr:
		// look up package, file and then type
		pkg, err = scp.packageForSelector(gofile, tpe.X)
		if err != nil {
			return fmt.Errorf("embedded struct: %v", err)
		}
		file, gd, ts, err = findSourceFile(pkg, tpe.Sel.Name)
		if err != nil {
			return fmt.Errorf("embedded struct: %v", err)
		}
	default:
		return fmt.Errorf("unable to resolve allOf member for: %v", expr)
	}

	sd := newSchemaDecl(file, gd, ts)
	if sd.hasAnnotation() && pkg.String() != "time" && ts.Name.Name != "Time" {
		// annotated model: refer to it and parse it later
		ref, err := spec.NewRef("#/definitions/" + sd.Name)
		if err != nil {
			return err
		}
		schema.Ref = ref
		scp.postDecls = append(scp.postDecls, *sd)
	} else {
		// not annotated: inline its fields
		switch st := ts.Type.(type) {
		case *ast.StructType:
			return scp.parseStructType(file, schema, st, seenPreviously)
		case *ast.InterfaceType:
			return scp.parseInterfaceType(file, schema, st, seenPreviously)
		}
	}

	return nil
}
// parseInterfaceType builds a schema from an interface declaration.
// Embedded interfaces become allOf members (by $ref when annotated with
// swagger:allOf, inline otherwise); nullary single-result methods become
// properties, optionally renamed via a swagger name annotation in their
// doc comment.
func (scp *schemaParser) parseInterfaceType(gofile *ast.File, bschema *spec.Schema, tpe *ast.InterfaceType, seenPreviously map[string]string) error {
	if tpe.Methods == nil {
		return nil
	}

	// first check if this has embedded interfaces, if so make sure to refer to those by ref
	// when they are decorated with an allOf annotation
	// go over the method list again and this time collect the nullary methods and parse the comments
	// as if they are properties on a struct
	var schema *spec.Schema
	seenProperties := seenPreviously
	hasAllOf := false

	for _, fld := range tpe.Methods.List {
		if len(fld.Names) == 0 {
			// if this created an allOf property then we have to rejig the schema var
			// because all the fields collected that aren't from embedded structs should go in
			// their own proper schema
			// first process embedded structs in order of embedding
			if allOfMember(fld.Doc) {
				hasAllOf = true
				if schema == nil {
					schema = new(spec.Schema)
				}
				var newSch spec.Schema
				// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
				// otherwise the fields will just be included as normal properties
				if err := scp.parseAllOfMember(gofile, &newSch, fld.Type, seenProperties); err != nil {
					return err
				}

				if fld.Doc != nil {
					// a value after swagger:allOf becomes the x-class extension
					for _, cmt := range fld.Doc.List {
						for _, ln := range strings.Split(cmt.Text, "\n") {
							matches := rxAllOf.FindStringSubmatch(ln)
							ml := len(matches)
							if ml > 1 {
								mv := matches[ml-1]
								if mv != "" {
									addExtension(&bschema.VendorExtensible, "x-class", mv)
								}
							}
						}
					}
				}

				bschema.AllOf = append(bschema.AllOf, newSch)
				continue
			}

			var newSch spec.Schema
			// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
			// otherwise the fields will just be included as normal properties
			if err := scp.parseEmbeddedType(gofile, &newSch, fld.Type, seenProperties); err != nil {
				return err
			}
			bschema.AllOf = append(bschema.AllOf, newSch)
			hasAllOf = true
		}
	}

	if schema == nil {
		schema = bschema
	}
	// then add and possibly override values
	if schema.Properties == nil {
		schema.Properties = make(map[string]spec.Schema)
	}
	schema.Typed("object", "")
	for _, fld := range tpe.Methods.List {
		// only nullary methods with a single result become properties
		if mtpe, ok := fld.Type.(*ast.FuncType); ok && mtpe.Params.NumFields() == 0 && mtpe.Results.NumFields() == 1 {
			gnm := fld.Names[0].Name
			nm := gnm
			if fld.Doc != nil {
				// a name annotation in the doc comment overrides the go name
				for _, cmt := range fld.Doc.List {
					for _, ln := range strings.Split(cmt.Text, "\n") {
						matches := rxName.FindStringSubmatch(ln)
						ml := len(matches)
						if ml > 1 {
							nm = matches[ml-1]
						}
					}
				}
			}

			ps := schema.Properties[nm]
			if err := parseProperty(scp, gofile, mtpe.Results.List[0].Type, schemaTypable{&ps, 0}); err != nil {
				return err
			}

			if err := scp.createParser(nm, schema, &ps, fld).Parse(fld.Doc); err != nil {
				return err
			}

			if ps.Ref.String() == "" && nm != gnm {
				addExtension(&ps.VendorExtensible, "x-go-name", gnm)
			}
			seenProperties[nm] = gnm
			schema.Properties[nm] = ps
		}

	}
	// NOTE(review): schema is always non-nil here, the nil check is redundant.
	if schema != nil && hasAllOf && len(schema.Properties) > 0 {
		bschema.AllOf = append(bschema.AllOf, *schema)
	}
	// drop properties that were removed from the source since the last run
	for k := range schema.Properties {
		if _, ok := seenProperties[k]; !ok {
			delete(schema.Properties, k)
		}
	}
	return nil
}
+
// parseStructType builds a schema from a struct declaration. Embedded
// fields are either composed as allOf members (when annotated with
// swagger:allOf) or inlined; exported named fields become properties,
// honouring json tags, swagger:ignore, swagger:strfmt and the validation
// tag parsers created by createParser.
func (scp *schemaParser) parseStructType(gofile *ast.File, bschema *spec.Schema, tpe *ast.StructType, seenPreviously map[string]string) error {
	if tpe.Fields == nil {
		return nil
	}
	var schema *spec.Schema
	seenProperties := seenPreviously
	hasAllOf := false

	for _, fld := range tpe.Fields.List {
		if len(fld.Names) == 0 {
			// if the field is annotated with swagger:ignore, ignore it
			if ignored(fld.Doc) {
				continue
			}

			_, ignore, _, err := parseJSONTag(fld)
			if err != nil {
				return err
			}
			if ignore {
				continue
			}

			// if this created an allOf property then we have to rejig the schema var
			// because all the fields collected that aren't from embedded structs should go in
			// their own proper schema
			// first process embedded structs in order of embedding
			if allOfMember(fld.Doc) {
				hasAllOf = true
				if schema == nil {
					schema = new(spec.Schema)
				}
				var newSch spec.Schema
				// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
				// otherwise the fields will just be included as normal properties
				if err := scp.parseAllOfMember(gofile, &newSch, fld.Type, seenProperties); err != nil {
					return err
				}

				if fld.Doc != nil {
					// a value after swagger:allOf becomes the x-class extension
					for _, cmt := range fld.Doc.List {
						for _, ln := range strings.Split(cmt.Text, "\n") {
							matches := rxAllOf.FindStringSubmatch(ln)
							ml := len(matches)
							if ml > 1 {
								mv := matches[ml-1]
								if mv != "" {
									addExtension(&bschema.VendorExtensible, "x-class", mv)
								}
							}
						}
					}
				}

				bschema.AllOf = append(bschema.AllOf, newSch)
				continue
			}
			if schema == nil {
				schema = bschema
			}

			// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
			// otherwise the fields will just be included as normal properties
			if err := scp.parseEmbeddedType(gofile, schema, fld.Type, seenProperties); err != nil {
				return err
			}
		}
	}
	if schema == nil {
		schema = bschema
	}

	// then add and possibly override values
	if schema.Properties == nil {
		schema.Properties = make(map[string]spec.Schema)
	}
	schema.Typed("object", "")
	for _, fld := range tpe.Fields.List {
		if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() {
			// if the field is annotated with swagger:ignore, ignore it
			if ignored(fld.Doc) {
				continue
			}

			gnm := fld.Names[0].Name
			nm, ignore, isString, err := parseJSONTag(fld)
			if err != nil {
				return err
			}
			if ignore {
				// a json:"-" tag also removes a property an embedded type
				// may have contributed under this field's go name
				for seenTagName, seenFieldName := range seenPreviously {
					if seenFieldName == gnm {
						delete(schema.Properties, seenTagName)
						break
					}
				}
				continue
			}

			ps := schema.Properties[nm]
			if err := parseProperty(scp, gofile, fld.Type, schemaTypable{&ps, 0}); err != nil {
				return err
			}
			if isString {
				// json ",string" tag: serialize as string regardless of go type
				ps.Typed("string", ps.Format)
				ps.Ref = spec.Ref{}
			}
			if strfmtName, ok := strfmtName(fld.Doc); ok {
				ps.Typed("string", strfmtName)
				ps.Ref = spec.Ref{}
			}

			if err := scp.createParser(nm, schema, &ps, fld).Parse(fld.Doc); err != nil {
				return err
			}

			if ps.Ref.String() == "" && nm != gnm {
				addExtension(&ps.VendorExtensible, "x-go-name", gnm)
			}
			// we have 2 cases:
			// 1. field with different name override tag
			// 2. field with different name removes tag
			// so we need to save both tag&name
			seenProperties[nm] = gnm
			schema.Properties[nm] = ps
		}
	}
	// NOTE(review): schema is always non-nil here, the nil check is redundant.
	if schema != nil && hasAllOf && len(schema.Properties) > 0 {
		bschema.AllOf = append(bschema.AllOf, *schema)
	}
	// drop properties that were removed from the source since the last run
	for k := range schema.Properties {
		if _, ok := seenProperties[k]; !ok {
			delete(schema.Properties, k)
		}
	}
	return nil
}
+
// schemaVendorExtensibleParser installs parsed x-* extensions onto a
// *spec.Schema destination.
var schemaVendorExtensibleParser = vendorExtensibleParser{
	setExtensions: func(ext spec.Extensions, dest interface{}) {
		dest.(*spec.Schema).Extensions = ext
	},
}
+
// createParser builds the sectioned doc-comment parser for one property:
// for non-$ref properties it wires up the full set of validation taggers
// (plus per-level item taggers for array types); for $ref properties only
// the required tagger applies, since other keywords are ignored next to $ref.
func (scp *schemaParser) createParser(nm string, schema, ps *spec.Schema, fld *ast.Field) *sectionedParser {
	sp := new(sectionedParser)

	schemeType, err := ps.Type.MarshalJSON()
	if err != nil {
		// NOTE(review): a marshal failure returns a nil parser; callers
		// invoke .Parse on the result immediately — confirm this cannot
		// happen in practice or guard at the call sites.
		return nil
	}

	if ps.Ref.String() == "" {
		sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
		sp.taggers = []tagParser{
			newSingleLineTagParser("maximum", &setMaximum{schemaValidations{ps}, rxf(rxMaximumFmt, "")}),
			newSingleLineTagParser("minimum", &setMinimum{schemaValidations{ps}, rxf(rxMinimumFmt, "")}),
			newSingleLineTagParser("multipleOf", &setMultipleOf{schemaValidations{ps}, rxf(rxMultipleOfFmt, "")}),
			newSingleLineTagParser("minLength", &setMinLength{schemaValidations{ps}, rxf(rxMinLengthFmt, "")}),
			newSingleLineTagParser("maxLength", &setMaxLength{schemaValidations{ps}, rxf(rxMaxLengthFmt, "")}),
			newSingleLineTagParser("pattern", &setPattern{schemaValidations{ps}, rxf(rxPatternFmt, "")}),
			newSingleLineTagParser("minItems", &setMinItems{schemaValidations{ps}, rxf(rxMinItemsFmt, "")}),
			newSingleLineTagParser("maxItems", &setMaxItems{schemaValidations{ps}, rxf(rxMaxItemsFmt, "")}),
			newSingleLineTagParser("unique", &setUnique{schemaValidations{ps}, rxf(rxUniqueFmt, "")}),
			newSingleLineTagParser("enum", &setEnum{schemaValidations{ps}, rxf(rxEnumFmt, "")}),
			newSingleLineTagParser("default", &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxDefaultFmt, "")}),
			// NOTE(review): the "type" tagger reuses setDefault/rxDefaultFmt —
			// looks like a copy/paste; confirm against upstream intent.
			newSingleLineTagParser("type", &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxDefaultFmt, "")}),
			newSingleLineTagParser("example", &setExample{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxExampleFmt, "")}),
			newSingleLineTagParser("required", &setRequiredSchema{schema, nm}),
			newSingleLineTagParser("readOnly", &setReadOnlySchema{ps}),
			newSingleLineTagParser("discriminator", &setDiscriminator{schema, nm}),
			newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, schemaVendorExtensibleParser.ParseInto(ps)), true),
		}

		// itemsTaggers builds the taggers for one nesting level of array items.
		itemsTaggers := func(items *spec.Schema, level int) []tagParser {
			schemeType, err := items.Type.MarshalJSON()
			if err != nil {
				return nil
			}
			// the expression is 1-index based not 0-index
			itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
			return []tagParser{
				newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{schemaValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{schemaValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{schemaValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{schemaValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{schemaValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{schemaValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{schemaValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{schemaValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{schemaValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{schemaValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
			}
		}

		// parseArrayTypes walks nested array element types, registering item
		// taggers per level until a non-array element is reached.
		var parseArrayTypes func(expr ast.Expr, items *spec.SchemaOrArray, level int) ([]tagParser, error)
		parseArrayTypes = func(expr ast.Expr, items *spec.SchemaOrArray, level int) ([]tagParser, error) {
			if items == nil || items.Schema == nil {
				return []tagParser{}, nil
			}
			switch iftpe := expr.(type) {
			case *ast.ArrayType:
				eleTaggers := itemsTaggers(items.Schema, level)
				sp.taggers = append(eleTaggers, sp.taggers...)
				otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Schema.Items, level+1)
				if err != nil {
					return nil, err
				}
				return otherTaggers, nil
			case *ast.Ident:
				taggers := []tagParser{}
				if iftpe.Obj == nil {
					taggers = itemsTaggers(items.Schema, level)
				}
				otherTaggers, err := parseArrayTypes(expr, items.Schema.Items, level+1)
				if err != nil {
					return nil, err
				}
				return append(taggers, otherTaggers...), nil
			case *ast.StarExpr:
				otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
				if err != nil {
					return nil, err
				}
				return otherTaggers, nil
			default:
				return nil, fmt.Errorf("unknown field type ele for %q", nm)
			}
		}
		// check if this is a primitive, if so parse the validations from the
		// doc comments of the slice declaration.
		if ftped, ok := fld.Type.(*ast.ArrayType); ok {
			taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
			if err != nil {
				// NOTE(review): the error is swallowed and the partially
				// configured parser is returned — confirm this is intended.
				return sp
			}
			sp.taggers = append(taggers, sp.taggers...)
		}

	} else {
		// $ref properties only honour the required tag
		sp.taggers = []tagParser{
			newSingleLineTagParser("required", &setRequiredSchema{schema, nm}),
		}
	}
	return sp
}
+
// hasFilePathPrefix reports whether the filesystem path s begins with the
// elements in prefix. Volume names are compared case-insensitively; the
// remainder is compared element-wise, so "/a/bc" is NOT under "/a/b".
//
// taken from: https://github.com/golang/go/blob/c87520c5981ecdeaa99e7ba636a6088f900c0c75/src/cmd/go/internal/load/path.go#L60-L80
func hasFilePathPrefix(s, prefix string) bool {
	sv := strings.ToUpper(filepath.VolumeName(s))
	pv := strings.ToUpper(filepath.VolumeName(prefix))
	if sv != pv {
		return false
	}

	s, prefix = s[len(sv):], prefix[len(pv):]
	if len(s) == len(prefix) {
		return s == prefix
	}
	if len(s) < len(prefix) {
		return false
	}
	if prefix != "" && prefix[len(prefix)-1] == filepath.Separator {
		// prefix already ends on an element boundary
		return strings.HasPrefix(s, prefix)
	}
	// require a separator right after the prefix so "/a/bc" is not under "/a/b"
	return s[len(prefix)] == filepath.Separator && s[:len(prefix)] == prefix
}
+
+func goroot() string {
+ cmd := exec.Command("go", "env", "GOROOT")
+ out, err := cmd.Output()
+ if err != nil {
+ panic("Could not detect GOROOT")
+ }
+ return string(out)
+}
+
// packageForFile resolves the loader package containing gofile: it makes
// the file path absolute, strips the first matching GOPATH entry (or
// GOROOT) plus "/src" to recover the import path, then matches that import
// path and the file's package name against the program's loaded packages.
func (scp *schemaParser) packageForFile(gofile *ast.File, tpe *ast.Ident) (*loader.PackageInfo, error) {
	fn := scp.program.Fset.File(gofile.Pos()).Name()
	if Debug {
		log.Println("trying for", fn, tpe.Name, tpe.String())
	}
	fa, err := filepath.Abs(fn)
	if err != nil {
		return nil, err
	}
	if Debug {
		log.Println("absolute path", fa)
	}
	var fgp string
	gopath := os.Getenv("GOPATH")
	if gopath == "" {
		// default GOPATH: $HOME/go
		gopath = filepath.Join(os.Getenv("HOME"), "go")
	}
	for _, p := range append(filepath.SplitList(gopath), goroot()) {
		pref := filepath.Join(p, "src")
		if hasFilePathPrefix(fa, pref) {
			// strip "<root>/src/" to get the import path
			fgp = filepath.Dir(strings.TrimPrefix(fa, pref))[1:]
			break
		}
	}
	if Debug {
		log.Println("package in gopath", fgp)
	}
	for pkg, pkgInfo := range scp.program.AllPackages {
		if Debug {
			log.Println("inferring for", tpe.Name, "with", gofile.Name.Name, "at", pkg.Path(), "against", filepath.ToSlash(fgp))
		}
		if pkg.Name() == gofile.Name.Name && filepath.ToSlash(fgp) == pkg.Path() {
			return pkgInfo, nil
		}
	}

	return nil, fmt.Errorf("unable to determine package for %s", fn)
}
+
// packageForSelector resolves the package referenced by the receiver of a
// selector expression (e.g. the "pkg" in pkg.Type). It matches the ident
// against the file's imports — by alias, loaded package name, or the last
// path element — and falls back to vendored variants of the import path.
func (scp *schemaParser) packageForSelector(gofile *ast.File, expr ast.Expr) (*loader.PackageInfo, error) {

	if pth, ok := expr.(*ast.Ident); ok {
		// lookup import
		var selPath string
		for _, imp := range gofile.Imports {
			pv, err := strconv.Unquote(imp.Path.Value)
			if err != nil {
				pv = imp.Path.Value
			}
			if imp.Name != nil {
				// aliased import: match the alias
				if imp.Name.Name == pth.Name {
					selPath = pv
					break
				}
			} else {
				pkg := scp.program.Package(pv)
				if pkg != nil && pth.Name == pkg.Pkg.Name() {
					selPath = pv
					break
				} else {
					// fall back to the last element of the import path
					parts := strings.Split(pv, "/")
					if len(parts) > 0 && parts[len(parts)-1] == pth.Name {
						selPath = pv
						break
					}
				}
			}
		}
		// find actual struct
		if selPath == "" {
			return nil, fmt.Errorf("no import found for %s", pth.Name)
		}

		pkg := scp.program.Package(selPath)
		if pkg != nil {
			return pkg, nil
		}
		// TODO: I must admit this made me cry, it's not even a great solution.
		pkg = scp.program.Package("github.com/go-swagger/go-swagger/vendor/" + selPath)
		if pkg != nil {
			return pkg, nil
		}
		// last resort: any loaded package whose path ends in /vendor/<selPath>
		for _, info := range scp.program.AllPackages {
			n := info.String()
			path := "/vendor/" + selPath
			if strings.HasSuffix(n, path) {
				pkg = scp.program.Package(n)
				return pkg, nil
			}
		}
	}
	return nil, fmt.Errorf("can't determine selector path from %v", expr)
}
+
+func (scp *schemaParser) makeRef(file *ast.File, pkg *loader.PackageInfo, gd *ast.GenDecl, ts *ast.TypeSpec, prop swaggerTypable) error {
+ sd := newSchemaDecl(file, gd, ts)
+ sd.inferNames()
+ // make an exception for time.Time because this is a well-known string format
+ if sd.Name == "Time" && pkg.String() == "time" {
+ return nil
+ }
+ ref, err := spec.NewRef("#/definitions/" + sd.Name)
+ if err != nil {
+ return err
+ }
+ prop.SetRef(ref)
+ scp.postDecls = append(scp.postDecls, *sd)
+ return nil
+}
+
// parseIdentProperty resolves a named type within pkg into prop. It checks
// a chain of special cases — time.Time, primitives without a source file,
// []byte-backed strfmt types, swagger:strfmt / enum / default / type doc
// annotations, and alias parameters — before falling back to a $ref (or a
// primitive mapping) based on the underlying declared type.
func (scp *schemaParser) parseIdentProperty(pkg *loader.PackageInfo, expr *ast.Ident, prop swaggerTypable) error {
	// before proceeding make an exception to time.Time because it is a well known string format
	if pkg.String() == "time" && expr.String() == "Time" {
		prop.Typed("string", "date-time")
		return nil
	}

	// find the file this selector points to
	file, gd, ts, err := findSourceFile(pkg, expr.Name)

	if err != nil {
		// no source declaration found: try mapping the name as a primitive
		err := swaggerSchemaForType(expr.Name, prop)
		if err != nil {
			return fmt.Errorf("package %s, error is: %v", pkg.String(), err)
		}
		return nil
	}

	if at, ok := ts.Type.(*ast.ArrayType); ok {
		// the swagger spec defines strfmt base64 as []byte.
		// in that case we don't actually want to turn it into an array
		// but we want to turn it into a string
		if _, ok := at.Elt.(*ast.Ident); ok {
			if strfmtName, ok := strfmtName(gd.Doc); ok {
				prop.Typed("string", strfmtName)
				return nil
			}
		}
		// this is a selector, so most likely not base64
		if strfmtName, ok := strfmtName(gd.Doc); ok {
			prop.Items().Typed("string", strfmtName)
			return nil
		}
	}

	// look at doc comments for swagger:strfmt [name]
	// when found this is the format name, create a schema with that name
	if strfmtName, ok := strfmtName(gd.Doc); ok {
		prop.Typed("string", strfmtName)
		return nil
	}

	if enumName, ok := enumName(gd.Doc); ok {
		var enumValues = getEnumValues(file, enumName)
		if len(enumValues) > 0 {
			prop.WithEnum(enumValues...)
			var typeName = reflect.TypeOf(enumValues[0]).String()
			err := swaggerSchemaForType(typeName, prop)
			if err != nil {
				return fmt.Errorf("file %s, error is: %v", file.Name, err)
			}
		}
	}

	if defaultName, ok := defaultName(gd.Doc); ok {
		// NOTE(review): the default annotation is only logged, not applied —
		// confirm whether this is intentionally unimplemented here.
		log.Println(defaultName)
		return nil
	}

	if typeName, ok := typeName(gd.Doc); ok {
		_ = swaggerSchemaForType(typeName, prop)
		return nil
	}

	if isAliasParam(prop) || aliasParam(gd.Doc) {
		// alias parameters map straight to the primitive they alias
		itype, ok := ts.Type.(*ast.Ident)
		if ok {
			err := swaggerSchemaForType(itype.Name, prop)
			if err == nil {
				return nil
			}
		}
	}
	switch tpe := ts.Type.(type) {
	case *ast.ArrayType:
		return scp.makeRef(file, pkg, gd, ts, prop)
	case *ast.StructType:
		return scp.makeRef(file, pkg, gd, ts, prop)

	case *ast.Ident:
		return scp.makeRef(file, pkg, gd, ts, prop)

	case *ast.StarExpr:
		return parseProperty(scp, file, tpe.X, prop)

	case *ast.SelectorExpr:
		// return scp.refForSelector(file, gd, tpe, ts, prop)
		return scp.makeRef(file, pkg, gd, ts, prop)

	case *ast.InterfaceType:
		return scp.makeRef(file, pkg, gd, ts, prop)

	case *ast.MapType:
		return scp.makeRef(file, pkg, gd, ts, prop)

	default:
		err := swaggerSchemaForType(expr.Name, prop)
		if err != nil {
			return fmt.Errorf("package %s, error is: %v", pkg.String(), err)
		}
		return nil
	}

}
+
+func (scp *schemaParser) typeForSelector(gofile *ast.File, expr *ast.SelectorExpr, prop swaggerTypable) error {
+ pkg, err := scp.packageForSelector(gofile, expr.X)
+ if err != nil {
+ return err
+ }
+
+ return scp.parseIdentProperty(pkg, expr.Sel, prop)
+}
+
+func findSourceFile(pkg *loader.PackageInfo, typeName string) (*ast.File, *ast.GenDecl, *ast.TypeSpec, error) {
+ for _, file := range pkg.Files {
+ for _, decl := range file.Decls {
+ if gd, ok := decl.(*ast.GenDecl); ok {
+ for _, gs := range gd.Specs {
+ if ts, ok := gs.(*ast.TypeSpec); ok {
+ strfmtNme, isStrfmt := strfmtName(gd.Doc)
+ if (isStrfmt && strfmtNme == typeName) || ts.Name != nil && ts.Name.Name == typeName {
+ return file, gd, ts, nil
+ }
+ }
+ }
+ }
+ }
+ }
+ return nil, nil, nil, fmt.Errorf("unable to find %s in %s", typeName, pkg.String())
+}
+
+func allOfMember(comments *ast.CommentGroup) bool {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ if rxAllOf.MatchString(ln) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func fileParam(comments *ast.CommentGroup) bool {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ if rxFileUpload.MatchString(ln) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func strfmtName(comments *ast.CommentGroup) (string, bool) {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxStrFmt.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+ return strings.TrimSpace(matches[1]), true
+ }
+ }
+ }
+ }
+ return "", false
+}
+
+func ignored(comments *ast.CommentGroup) bool {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ if rxIgnoreOverride.MatchString(ln) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func enumName(comments *ast.CommentGroup) (string, bool) {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxEnum.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+ return strings.TrimSpace(matches[1]), true
+ }
+ }
+ }
+ }
+ return "", false
+}
+
+func aliasParam(comments *ast.CommentGroup) bool {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ if rxAlias.MatchString(ln) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func defaultName(comments *ast.CommentGroup) (string, bool) {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxDefault.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+ return strings.TrimSpace(matches[1]), true
+ }
+ }
+ }
+ }
+ return "", false
+}
+
+func typeName(comments *ast.CommentGroup) (string, bool) {
+
+ var typ string
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxType.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+ typ = strings.TrimSpace(matches[1])
+ return typ, true
+ }
+ }
+ }
+ }
+ return "", false
+}
+
// parseProperty maps a struct field's type expression onto the given swagger
// typable: identifiers resolve through their package, pointers unwrap to the
// pointee, slices recurse into Items, structs/maps/interfaces populate a full
// schema, and anything else is reported as unsupported.
func parseProperty(scp *schemaParser, gofile *ast.File, fld ast.Expr, prop swaggerTypable) error {
	switch ftpe := fld.(type) {
	case *ast.Ident: // simple value
		pkg, err := scp.packageForFile(gofile, ftpe)
		if err != nil {
			return err
		}
		return scp.parseIdentProperty(pkg, ftpe, prop)

	case *ast.StarExpr: // pointer to something, optional by default
		if err := parseProperty(scp, gofile, ftpe.X, prop); err != nil {
			return err
		}

	case *ast.ArrayType: // slice type
		// recurse on the element type, targeting the items schema
		if err := parseProperty(scp, gofile, ftpe.Elt, prop.Items()); err != nil {
			return err
		}

	case *ast.StructType:
		// embedded structs need a full schema target; Items can't hold one
		schema := prop.Schema()
		if schema == nil {
			return fmt.Errorf("items doesn't support embedded structs")
		}
		return scp.parseStructType(gofile, prop.Schema(), ftpe, make(map[string]string))

	case *ast.SelectorExpr:
		err := scp.typeForSelector(gofile, ftpe, prop)
		return err

	case *ast.MapType:
		// check if key is a string type, if not print a message
		// and skip the map property. Only maps with string keys can go into additional properties
		sch := prop.Schema()
		if sch == nil {
			return fmt.Errorf("items doesn't support maps")
		}
		if keyIdent, ok := ftpe.Key.(*ast.Ident); sch != nil && ok {
			if keyIdent.Name == "string" {
				if sch.AdditionalProperties == nil {
					sch.AdditionalProperties = new(spec.SchemaOrBool)
				}
				// explicit false: the value schema below describes the entries
				sch.AdditionalProperties.Allows = false
				if sch.AdditionalProperties.Schema == nil {
					sch.AdditionalProperties.Schema = new(spec.Schema)
				}
				if err := parseProperty(scp, gofile, ftpe.Value, schemaTypable{sch.AdditionalProperties.Schema, 0}); err != nil {
					return err
				}
				sch.Typed("object", "")
			}
		}

	case *ast.InterfaceType:
		// interface{} carries no structure; emit a free-form object
		prop.Schema().Typed("object", "")
	default:
		// build the best source position string available for the error
		pos := "unknown file:unknown position"
		if scp != nil {
			if scp.program != nil {
				if scp.program.Fset != nil {
					pos = scp.program.Fset.Position(fld.Pos()).String()
				}
			}
		}
		return fmt.Errorf("Expr (%s) is unsupported for a schema", pos)
	}
	return nil
}
+
+func parseJSONTag(field *ast.Field) (name string, ignore bool, isString bool, err error) {
+ if len(field.Names) > 0 {
+ name = field.Names[0].Name
+ }
+ if field.Tag != nil && len(strings.TrimSpace(field.Tag.Value)) > 0 {
+ tv, err := strconv.Unquote(field.Tag.Value)
+ if err != nil {
+ return name, false, false, err
+ }
+
+ if strings.TrimSpace(tv) != "" {
+ st := reflect.StructTag(tv)
+ jsonParts := strings.Split(st.Get("json"), ",")
+ jsonName := jsonParts[0]
+
+ if len(jsonParts) > 1 && jsonParts[1] == "string" {
+ isString = isFieldStringable(field.Type)
+ }
+
+ if jsonName == "-" {
+ return name, true, isString, nil
+ } else if jsonName != "" {
+ return jsonName, false, isString, nil
+ }
+ }
+ }
+ return name, false, false, nil
+}
+
+// isFieldStringable check if the field type is a scalar. If the field type is
+// *ast.StarExpr and is pointer type, check if it refers to a scalar.
+// Otherwise, the ",string" directive doesn't apply.
+func isFieldStringable(tpe ast.Expr) bool {
+ if ident, ok := tpe.(*ast.Ident); ok {
+ switch ident.Name {
+ case "int", "int8", "int16", "int32", "int64",
+ "uint", "uint8", "uint16", "uint32", "uint64",
+ "float64", "string", "bool":
+ return true
+ }
+ } else if starExpr, ok := tpe.(*ast.StarExpr); ok {
+ return isFieldStringable(starExpr.X)
+ } else {
+ return false
+ }
+ return false
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/validators.go b/vendor/github.com/go-swagger/go-swagger/scan/validators.go
new file mode 100644
index 000000000..45caf8783
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/validators.go
@@ -0,0 +1,829 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "encoding/json"
+ "fmt"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
// validationBuilder abstracts the swagger construct (parameter, header,
// schema item) onto which parsed validation annotations are applied.
type validationBuilder interface {
	// SetMaximum sets the numeric upper bound; true marks it exclusive.
	SetMaximum(float64, bool)
	// SetMinimum sets the numeric lower bound; true marks it exclusive.
	SetMinimum(float64, bool)
	// SetMultipleOf constrains numeric values to multiples of the factor.
	SetMultipleOf(float64)

	// SetMinItems / SetMaxItems bound the length of array values.
	SetMinItems(int64)
	SetMaxItems(int64)

	// SetMinLength / SetMaxLength bound the length of string values.
	SetMinLength(int64)
	SetMaxLength(int64)
	// SetPattern constrains strings to the given regular expression.
	SetPattern(string)

	// SetUnique requires array items to be unique.
	SetUnique(bool)
	// SetEnum restricts values to the comma-separated list of literals.
	SetEnum(string)
	// SetDefault records the default value.
	SetDefault(interface{})
	// SetExample records an example value.
	SetExample(interface{})
}
+
// valueParser recognizes one kind of annotation line and parses its value(s).
type valueParser interface {
	// Parse consumes the matched annotation line(s) and applies their values.
	Parse([]string) error
	// Matches reports whether this parser handles the given line.
	Matches(string) bool
}
+
+type setMaximum struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMaximum) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 2 && len(matches[2]) > 0 {
+ max, err := strconv.ParseFloat(matches[2], 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMaximum(max, matches[1] == "<")
+ }
+ return nil
+}
+
+func (sm *setMaximum) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setMinimum struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMinimum) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+func (sm *setMinimum) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 2 && len(matches[2]) > 0 {
+ min, err := strconv.ParseFloat(matches[2], 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMinimum(min, matches[1] == ">")
+ }
+ return nil
+}
+
+type setMultipleOf struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMultipleOf) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+func (sm *setMultipleOf) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 2 && len(matches[1]) > 0 {
+ multipleOf, err := strconv.ParseFloat(matches[1], 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMultipleOf(multipleOf)
+ }
+ return nil
+}
+
+type setMaxItems struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMaxItems) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+func (sm *setMaxItems) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ maxItems, err := strconv.ParseInt(matches[1], 10, 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMaxItems(maxItems)
+ }
+ return nil
+}
+
+type setMinItems struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMinItems) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+func (sm *setMinItems) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ minItems, err := strconv.ParseInt(matches[1], 10, 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMinItems(minItems)
+ }
+ return nil
+}
+
+type setMaxLength struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMaxLength) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ maxLength, err := strconv.ParseInt(matches[1], 10, 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMaxLength(maxLength)
+ }
+ return nil
+}
+
+func (sm *setMaxLength) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setMinLength struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMinLength) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ minLength, err := strconv.ParseInt(matches[1], 10, 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMinLength(minLength)
+ }
+ return nil
+}
+
+func (sm *setMinLength) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setPattern struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setPattern) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ sm.builder.SetPattern(matches[1])
+ }
+ return nil
+}
+
+func (sm *setPattern) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setCollectionFormat struct {
+ builder operationValidationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setCollectionFormat) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ sm.builder.SetCollectionFormat(matches[1])
+ }
+ return nil
+}
+
+func (sm *setCollectionFormat) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setUnique struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (su *setUnique) Matches(line string) bool {
+ return su.rx.MatchString(line)
+}
+
+func (su *setUnique) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := su.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ su.builder.SetUnique(req)
+ }
+ return nil
+}
+
+type setEnum struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (se *setEnum) Matches(line string) bool {
+ return se.rx.MatchString(line)
+}
+
+func (se *setEnum) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := se.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ se.builder.SetEnum(matches[1])
+ }
+ return nil
+}
+
+func parseValueFromSchema(s string, schema *spec.SimpleSchema) (interface{}, error) {
+ if schema != nil {
+ switch strings.Trim(schema.TypeName(), "\"") {
+ case "integer", "int", "int64", "int32", "int16":
+ return strconv.Atoi(s)
+ case "bool", "boolean":
+ return strconv.ParseBool(s)
+ case "number", "float64", "float32":
+ return strconv.ParseFloat(s, 64)
+ case "object":
+ var obj map[string]interface{}
+ if err := json.Unmarshal([]byte(s), &obj); err != nil {
+ // If we can't parse it, just return the string.
+ return s, nil
+ }
+ return obj, nil
+ case "array":
+ var slice []interface{}
+ if err := json.Unmarshal([]byte(s), &slice); err != nil {
+ // If we can't parse it, just return the string.
+ return s, nil
+ }
+ return slice, nil
+ default:
+ return s, nil
+ }
+ } else {
+ return s, nil
+ }
+}
+
+type setDefault struct {
+ scheme *spec.SimpleSchema
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sd *setDefault) Matches(line string) bool {
+ return sd.rx.MatchString(line)
+}
+
+func (sd *setDefault) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sd.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ d, err := parseValueFromSchema(matches[1], sd.scheme)
+ if err != nil {
+ return err
+ }
+ sd.builder.SetDefault(d)
+ }
+ return nil
+}
+
+type setExample struct {
+ scheme *spec.SimpleSchema
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (se *setExample) Matches(line string) bool {
+ return se.rx.MatchString(line)
+}
+
+func (se *setExample) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := se.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ d, err := parseValueFromSchema(matches[1], se.scheme)
+ if err != nil {
+ return err
+ }
+ se.builder.SetExample(d)
+ }
+ return nil
+}
+
+type matchOnlyParam struct {
+ tgt *spec.Parameter
+ rx *regexp.Regexp
+}
+
+func (mo *matchOnlyParam) Matches(line string) bool {
+ return mo.rx.MatchString(line)
+}
+
+func (mo *matchOnlyParam) Parse(lines []string) error {
+ return nil
+}
+
+type setRequiredParam struct {
+ tgt *spec.Parameter
+}
+
+func (su *setRequiredParam) Matches(line string) bool {
+ return rxRequired.MatchString(line)
+}
+
+func (su *setRequiredParam) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := rxRequired.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ su.tgt.Required = req
+ }
+ return nil
+}
+
+type setReadOnlySchema struct {
+ tgt *spec.Schema
+}
+
+func (su *setReadOnlySchema) Matches(line string) bool {
+ return rxReadOnly.MatchString(line)
+}
+
+func (su *setReadOnlySchema) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := rxReadOnly.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ su.tgt.ReadOnly = req
+ }
+ return nil
+}
+
+type setDiscriminator struct {
+ schema *spec.Schema
+ field string
+}
+
+func (su *setDiscriminator) Matches(line string) bool {
+ return rxDiscriminator.MatchString(line)
+}
+
+func (su *setDiscriminator) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := rxDiscriminator.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ if req {
+ su.schema.Discriminator = su.field
+ } else {
+ if su.schema.Discriminator == su.field {
+ su.schema.Discriminator = ""
+ }
+ }
+ }
+ return nil
+}
+
+type setRequiredSchema struct {
+ schema *spec.Schema
+ field string
+}
+
+func (su *setRequiredSchema) Matches(line string) bool {
+ return rxRequired.MatchString(line)
+}
+
+func (su *setRequiredSchema) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := rxRequired.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ midx := -1
+ for i, nm := range su.schema.Required {
+ if nm == su.field {
+ midx = i
+ break
+ }
+ }
+ if req {
+ if midx < 0 {
+ su.schema.Required = append(su.schema.Required, su.field)
+ }
+ } else if midx >= 0 {
+ su.schema.Required = append(su.schema.Required[:midx], su.schema.Required[midx+1:]...)
+ }
+ }
+ return nil
+}
+
+func newMultilineDropEmptyParser(rx *regexp.Regexp, set func([]string)) *multiLineDropEmptyParser {
+ return &multiLineDropEmptyParser{
+ rx: rx,
+ set: set,
+ }
+}
+
+type multiLineDropEmptyParser struct {
+ set func([]string)
+ rx *regexp.Regexp
+}
+
+func (m *multiLineDropEmptyParser) Matches(line string) bool {
+ return m.rx.MatchString(line)
+}
+
+func (m *multiLineDropEmptyParser) Parse(lines []string) error {
+ m.set(removeEmptyLines(lines))
+ return nil
+}
+
+func newSetSchemes(set func([]string)) *setSchemes {
+ return &setSchemes{
+ set: set,
+ rx: rxSchemes,
+ }
+}
+
+type setSchemes struct {
+ set func([]string)
+ rx *regexp.Regexp
+}
+
+func (ss *setSchemes) Matches(line string) bool {
+ return ss.rx.MatchString(line)
+}
+
+func (ss *setSchemes) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := ss.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ sch := strings.Split(matches[1], ", ")
+
+ var schemes []string
+ for _, s := range sch {
+ ts := strings.TrimSpace(s)
+ if ts != "" {
+ schemes = append(schemes, ts)
+ }
+ }
+ ss.set(schemes)
+ }
+ return nil
+}
+
// newSetSecurity builds a setSecurity parser for rx.
func newSetSecurity(rx *regexp.Regexp, setter func([]map[string][]string)) *setSecurity {
	return &setSecurity{set: setter, rx: rx}
}

// setSecurity parses security requirement lines of the form
// "scheme: scope1, scope2" into a list of scheme-to-scopes maps.
type setSecurity struct {
	set func([]map[string][]string)
	rx  *regexp.Regexp
}

// Matches reports whether line carries a security annotation.
func (ss *setSecurity) Matches(line string) bool {
	return ss.rx.MatchString(line)
}

// Parse converts each "key: scopes" line into a single-entry map and hands
// the collected list to the setter.
func (ss *setSecurity) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && lines[0] == "") {
		return nil
	}

	var result []map[string][]string
	for _, line := range lines {
		kv := strings.SplitN(line, ":", 2)
		if len(kv) < 2 {
			// lines without a colon define no scheme
			continue
		}
		scopes := []string{}
		for _, scope := range strings.Split(kv[1], ",") {
			tr := strings.TrimSpace(scope)
			if tr == "" {
				continue
			}
			// only the first word of each entry names the scope
			tr = strings.SplitAfter(tr, " ")[0]
			scopes = append(scopes, strings.TrimSpace(tr))
		}
		result = append(result, map[string][]string{strings.TrimSpace(kv[0]): scopes})
	}
	ss.set(result)
	return nil
}
+
// newSetResponses builds a setOpResponses parser wired to the given
// definitions and responses lookup tables.
func newSetResponses(definitions map[string]spec.Schema, responses map[string]spec.Response, setter func(*spec.Response, map[int]spec.Response)) *setOpResponses {
	return &setOpResponses{
		set:         setter,
		rx:          rxResponses,
		definitions: definitions,
		responses:   responses,
	}
}

// setOpResponses parses the swagger:responses annotation block for an operation.
type setOpResponses struct {
	// set receives the parsed default response and status-code response map
	set func(*spec.Response, map[int]spec.Response)
	rx  *regexp.Regexp
	// definitions and responses are used to decide whether a target name
	// refers to a model definition or a named response
	definitions map[string]spec.Schema
	responses   map[string]spec.Response
}

// Matches reports whether line starts a responses block.
func (ss *setOpResponses) Matches(line string) bool {
	return ss.rx.MatchString(line)
}
+
// ResponseTag used when specifying a response to point to a defined swagger:response.
const ResponseTag = "response"

// BodyTag used when specifying a response to point to a model/schema definition.
const BodyTag = "body"

// DescriptionTag used when specifying a response that gives a description of the response.
const DescriptionTag = "description"
+
+func parseTags(line string) (modelOrResponse string, arrays int, isDefinitionRef bool, description string, err error) {
+ tags := strings.Split(line, " ")
+ parsedModelOrResponse := false
+
+ for i, tagAndValue := range tags {
+ tagValList := strings.SplitN(tagAndValue, ":", 2)
+ var tag, value string
+ if len(tagValList) > 1 {
+ tag = tagValList[0]
+ value = tagValList[1]
+ } else {
+ //TODO: Print a warning, and in the long term, do not support not tagged values
+ //Add a default tag if none is supplied
+ if i == 0 {
+ tag = ResponseTag
+ } else {
+ tag = DescriptionTag
+ }
+ value = tagValList[0]
+ }
+
+ foundModelOrResponse := false
+ if !parsedModelOrResponse {
+ if tag == BodyTag {
+ foundModelOrResponse = true
+ isDefinitionRef = true
+ }
+ if tag == ResponseTag {
+ foundModelOrResponse = true
+ isDefinitionRef = false
+ }
+ }
+ if foundModelOrResponse {
+ //Read the model or response tag
+ parsedModelOrResponse = true
+ //Check for nested arrays
+ arrays = 0
+ for strings.HasPrefix(value, "[]") {
+ arrays++
+ value = value[2:]
+ }
+ //What's left over is the model name
+ modelOrResponse = value
+ } else {
+ foundDescription := false
+ if tag == DescriptionTag {
+ foundDescription = true
+ }
+ if foundDescription {
+ //Descriptions are special, they make they read the rest of the line
+ descriptionWords := []string{value}
+ if i < len(tags)-1 {
+ descriptionWords = append(descriptionWords, tags[i+1:]...)
+ }
+ description = strings.Join(descriptionWords, " ")
+ break
+ } else {
+ if tag == ResponseTag || tag == BodyTag || tag == DescriptionTag {
+ err = fmt.Errorf("Found valid tag %s, but not in a valid position", tag)
+ } else {
+ err = fmt.Errorf("Found invalid tag: %s", tag)
+ }
+ //return error
+ return
+ }
+ }
+ }
+
+ //TODO: Maybe do, if !parsedModelOrResponse {return some error}
+ return
+}
+
// Parse interprets the swagger:responses annotation block. Each line has the
// shape "<status|default>: [response|body]:target [description ...]"; results
// are collected into an optional default response and a status-code map and
// handed to the setter.
func (ss *setOpResponses) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}

	var def *spec.Response
	var scr map[int]spec.Response

	for _, line := range lines {
		kv := strings.SplitN(line, ":", 2)
		var key, value string

		if len(kv) > 1 {
			key = strings.TrimSpace(kv[0])
			if key == "" {
				// this must be some weird empty line
				continue
			}
			value = strings.TrimSpace(kv[1])
			if value == "" {
				// no target after the status code: record an empty response
				var resp spec.Response
				if strings.EqualFold("default", key) {
					if def == nil {
						def = &resp
					}
				} else {
					if sc, err := strconv.Atoi(key); err == nil {
						if scr == nil {
							scr = make(map[int]spec.Response)
						}
						scr[sc] = resp
					}
				}
				continue
			}
			refTarget, arrays, isDefinitionRef, description, err := parseTags(value)
			if err != nil {
				return err
			}
			//A possible exception for having a definition
			if _, ok := ss.responses[refTarget]; !ok {
				if _, ok := ss.definitions[refTarget]; ok {
					isDefinitionRef = true
				}
			}

			// point the ref at a definition or a named response accordingly
			var ref spec.Ref
			if isDefinitionRef {
				if description == "" {
					description = refTarget
				}
				ref, err = spec.NewRef("#/definitions/" + refTarget)
			} else {
				ref, err = spec.NewRef("#/responses/" + refTarget)
			}
			if err != nil {
				return err
			}

			// description should used on anyway.
			resp := spec.Response{ResponseProps: spec.ResponseProps{Description: description}}

			if isDefinitionRef {
				resp.Schema = new(spec.Schema)
				resp.Description = description
				if arrays == 0 {
					resp.Schema.Ref = ref
				} else {
					// wrap the ref in one nested array schema per "[]" prefix
					cs := resp.Schema
					for i := 0; i < arrays; i++ {
						cs.Typed("array", "")
						cs.Items = new(spec.SchemaOrArray)
						cs.Items.Schema = new(spec.Schema)
						cs = cs.Items.Schema
					}
					cs.Ref = ref
				}
				// ref. could be empty while use description tag
			} else if len(refTarget) > 0 {
				resp.Ref = ref
			}

			if strings.EqualFold("default", key) {
				if def == nil {
					def = &resp
				}
			} else {
				if sc, err := strconv.Atoi(key); err == nil {
					if scr == nil {
						scr = make(map[int]spec.Response)
					}
					scr[sc] = resp
				}
			}
		}
	}
	ss.set(def, scr)
	return nil
}
+
+func parseEnum(val string, s *spec.SimpleSchema) []interface{} {
+ list := strings.Split(val, ",")
+ interfaceSlice := make([]interface{}, len(list))
+ for i, d := range list {
+ v, err := parseValueFromSchema(d, s)
+ if err != nil {
+ interfaceSlice[i] = d
+ continue
+ }
+
+ interfaceSlice[i] = v
+ }
+ return interfaceSlice
+}