Use vendored go-swagger (#8087)

* Use vendored go-swagger

* vendor go-swagger

* revert unwanted change

* remove unneeded GO111MODULE

* Update Makefile

Co-Authored-By: techknowlogick <matti@mdranta.net>
Antoine GIRARD authored on 2019-09-04 21:53:54 +02:00; committed by Lauris BH
parent 4cb1bdddc8
commit 9fe4437bda
686 changed files with 143379 additions and 17 deletions

vendor/github.com/go-swagger/go-swagger/LICENSE generated vendored Normal file

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@@ -0,0 +1,5 @@
swagger
swagger.json
models
operations
cmd


@@ -0,0 +1,115 @@
package commands
import (
"encoding/json"
"errors"
"io/ioutil"
"log"
"os"
"github.com/go-openapi/loads"
"github.com/go-swagger/go-swagger/cmd/swagger/commands/diff"
)
// JSONFormat for json
const JSONFormat = "json"
// DiffCommand is a command that generates the diff of two swagger specs.
//
// The command compares an old and a new specification and reports how they differ.
type DiffCommand struct {
OnlyBreakingChanges bool `long:"break" short:"b" description:"When present, only shows incompatible changes"`
Format string `long:"format" short:"f" description:"When present, writes output as json" default:"txt" choice:"txt" choice:"json"`
IgnoreFile string `long:"ignore" short:"i" description:"Exception file of diffs to ignore (copy output from json diff format)" default:"none specified"`
Destination string `long:"dest" short:"d" description:"Output destination file or stdout" default:"stdout"`
}
// Execute diffs the two specs provided
func (c *DiffCommand) Execute(args []string) error {
if len(args) != 2 {
msg := `missing arguments for diff command (use --help for more info)`
return errors.New(msg)
}
log.Println("Run Config:")
log.Printf("Spec1: %s", args[0])
log.Printf("Spec2: %s", args[1])
log.Printf("ReportOnlyBreakingChanges (-c) :%v", c.OnlyBreakingChanges)
log.Printf("OutputFormat (-f) :%s", c.Format)
log.Printf("IgnoreFile (-i) :%s", c.IgnoreFile)
log.Printf("Diff Report Destination (-d) :%s", c.Destination)
diffs, err := getDiffs(args[0], args[1])
if err != nil {
return err
}
ignores, err := readIgnores(c.IgnoreFile)
if err != nil {
return err
}
diffs = diffs.FilterIgnores(ignores)
if len(ignores) > 0 {
log.Printf("Diff Report Ignored Items from IgnoreFile")
for _, eachItem := range ignores {
log.Printf("%s", eachItem.String())
}
}
if c.Format == JSONFormat {
err = diffs.ReportAllDiffs(true)
if err != nil {
return err
}
} else {
if c.OnlyBreakingChanges {
err = diffs.ReportCompatibility()
} else {
err = diffs.ReportAllDiffs(false)
}
}
return err
}
func readIgnores(ignoreFile string) (diff.SpecDifferences, error) {
ignoreDiffs := diff.SpecDifferences{}
if ignoreFile == "none specified" {
return ignoreDiffs, nil
}
// Open our jsonFile
jsonFile, err := os.Open(ignoreFile)
// if os.Open returns an error, handle it
if err != nil {
return nil, err
}
// defer the closing of our jsonFile so that we can parse it later on
defer jsonFile.Close()
byteValue, err := ioutil.ReadAll(jsonFile)
if err != nil {
return nil, err
}
// unmarshal the ignore file into a list of spec differences
err = json.Unmarshal(byteValue, &ignoreDiffs)
if err != nil {
return nil, err
}
return ignoreDiffs, nil
}
func getDiffs(oldSpecPath, newSpecPath string) (diff.SpecDifferences, error) {
swaggerDoc1 := oldSpecPath
specDoc1, err := loads.Spec(swaggerDoc1)
if err != nil {
return nil, err
}
swaggerDoc2 := newSpecPath
specDoc2, err := loads.Spec(swaggerDoc2)
if err != nil {
return nil, err
}
return diff.Compare(specDoc1.Spec(), specDoc2.Spec())
}
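
A minimal standalone sketch (not part of the vendored code) of the programmatic path the command above takes: load both specs with go-openapi/loads and hand them to diff.Compare. The file names old.json and new.json are hypothetical.

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/loads"
	"github.com/go-swagger/go-swagger/cmd/swagger/commands/diff"
)

func main() {
	// load the two specs, as getDiffs above does via loads.Spec
	oldDoc, err := loads.Spec("old.json") // hypothetical path
	if err != nil {
		log.Fatal(err)
	}
	newDoc, err := loads.Spec("new.json") // hypothetical path
	if err != nil {
		log.Fatal(err)
	}
	// Compare returns the full list of differences between the two documents
	diffs, err := diff.Compare(oldDoc.Spec(), newDoc.Spec())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%d differences, %d breaking\n", len(diffs), diffs.BreakingChangeCount())
}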


@@ -0,0 +1,99 @@
package diff
// This is a simple DSL for diffing arrays
// FromArrayStruct utility struct to encompass diffing of string arrays
type FromArrayStruct struct {
from []string
}
// FromStringArray starts a fluent diff expression
func FromStringArray(from []string) FromArrayStruct {
return FromArrayStruct{from}
}
// DiffsTo completes a fluent diff expression
func (f FromArrayStruct) DiffsTo(toArray []string) (added, deleted, common []string) {
inFrom := 1
inTo := 2
m := make(map[string]int)
for _, item := range f.from {
m[item] = inFrom
}
for _, item := range toArray {
if _, ok := m[item]; ok {
m[item] |= inTo
} else {
m[item] = inTo
}
}
for key, val := range m {
switch val {
case inFrom:
deleted = append(deleted, key)
case inTo:
added = append(added, key)
default:
common = append(common, key)
}
}
return
}
// FromMapStruct utility struct to encompass diffing of string maps
type FromMapStruct struct {
srcMap map[string]interface{}
}
// FromStringMap starts a comparison by declaring a source map
func FromStringMap(srcMap map[string]interface{}) FromMapStruct {
return FromMapStruct{srcMap}
}
// Pair stores a pair of items which share a key in two maps
type Pair struct {
First interface{}
Second interface{}
}
// DiffsTo - generates diffs for a comparison
func (f FromMapStruct) DiffsTo(destMap map[string]interface{}) (added, deleted, common map[string]interface{}) {
added = make(map[string]interface{})
deleted = make(map[string]interface{})
common = make(map[string]interface{})
inSrc := 1
inDest := 2
m := make(map[string]int)
// enter values for all items in the source array
for key := range f.srcMap {
m[key] = inSrc
}
// now either set or 'boolean or' a new flag if in the second collection
for key := range destMap {
if _, ok := m[key]; ok {
m[key] |= inDest
} else {
m[key] = inDest
}
}
// finally inspect the values and generate the left,right and shared collections
// for the shared items, store both values in case there's a diff
for key, val := range m {
switch val {
case inSrc:
deleted[key] = f.srcMap[key]
case inDest:
added[key] = destMap[key]
default:
common[key] = Pair{f.srcMap[key], destMap[key]}
}
}
return added, deleted, common
}
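
A short usage sketch (assumed, not from the vendored source) of the fluent diff DSL above; the string slices and map values are arbitrary examples.

package main

import (
	"fmt"

	"github.com/go-swagger/go-swagger/cmd/swagger/commands/diff"
)

func main() {
	// items only in the second slice are "added", items only in the first are
	// "deleted", and items present in both are "common"
	added, deleted, common := diff.FromStringArray([]string{"get", "post"}).DiffsTo([]string{"post", "put"})
	fmt.Println(added, deleted, common) // [put] [get] [post] (map iteration, so order may vary)

	// the map variant pairs up values that share a key
	a, d, c := diff.FromStringMap(map[string]interface{}{"x": 1}).DiffsTo(map[string]interface{}{"y": 2})
	fmt.Println(a, d, c) // map[y:2] map[x:1] map[]
}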


@@ -0,0 +1,90 @@
package diff
// CompatibilityPolicy decides which changes are breaking and which are not
type CompatibilityPolicy struct {
ForResponse map[SpecChangeCode]Compatibility
ForRequest map[SpecChangeCode]Compatibility
ForChange map[SpecChangeCode]Compatibility
}
var compatibility CompatibilityPolicy
func init() {
compatibility = CompatibilityPolicy{
ForResponse: map[SpecChangeCode]Compatibility{
AddedRequiredProperty: Breaking,
DeletedProperty: Breaking,
AddedProperty: NonBreaking,
DeletedResponse: Breaking,
AddedResponse: NonBreaking,
WidenedType: NonBreaking,
NarrowedType: NonBreaking,
ChangedType: Breaking,
ChangedToCompatibleType: NonBreaking,
AddedEnumValue: Breaking,
DeletedEnumValue: NonBreaking,
AddedResponseHeader: NonBreaking,
ChangedResponseHeader: Breaking,
DeletedResponseHeader: Breaking,
ChangedDescripton: NonBreaking,
AddedDescripton: NonBreaking,
DeletedDescripton: NonBreaking,
ChangedTag: NonBreaking,
AddedTag: NonBreaking,
DeletedTag: NonBreaking,
},
ForRequest: map[SpecChangeCode]Compatibility{
AddedRequiredProperty: Breaking,
DeletedProperty: Breaking,
AddedProperty: Breaking,
AddedOptionalParam: NonBreaking,
AddedRequiredParam: Breaking,
DeletedOptionalParam: NonBreaking,
DeletedRequiredParam: NonBreaking,
WidenedType: NonBreaking,
NarrowedType: Breaking,
ChangedType: Breaking,
ChangedToCompatibleType: NonBreaking,
ChangedOptionalToRequiredParam: Breaking,
ChangedRequiredToOptionalParam: NonBreaking,
AddedEnumValue: NonBreaking,
DeletedEnumValue: Breaking,
ChangedDescripton: NonBreaking,
AddedDescripton: NonBreaking,
DeletedDescripton: NonBreaking,
ChangedTag: NonBreaking,
AddedTag: NonBreaking,
DeletedTag: NonBreaking,
},
ForChange: map[SpecChangeCode]Compatibility{
NoChangeDetected: NonBreaking,
AddedEndpoint: NonBreaking,
DeletedEndpoint: Breaking,
DeletedDeprecatedEndpoint: NonBreaking,
AddedConsumesFormat: NonBreaking,
DeletedConsumesFormat: Breaking,
AddedProducesFormat: Breaking,
DeletedProducesFormat: NonBreaking,
AddedSchemes: NonBreaking,
DeletedSchemes: Breaking,
ChangedHostURL: Breaking,
ChangedBasePath: Breaking,
ChangedDescripton: NonBreaking,
AddedDescripton: NonBreaking,
DeletedDescripton: NonBreaking,
ChangedTag: NonBreaking,
AddedTag: NonBreaking,
DeletedTag: NonBreaking,
},
}
}
func getCompatibilityForChange(diffCode SpecChangeCode, where DataDirection) Compatibility {
if compat, commonChange := compatibility.ForChange[diffCode]; commonChange {
return compat
}
if where == Request {
return compatibility.ForRequest[diffCode]
}
return compatibility.ForResponse[diffCode]
}
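
A hedged in-package sketch of how the lookup above resolves a change code; getCompatibilityForChange is unexported, so this would have to live inside the diff package itself (for example in a test file).

// in-package sketch: the shared ForChange table is consulted first, then the
// direction-specific table
func sketchPolicyLookup() {
	if getCompatibilityForChange(DeletedProperty, Request) != Breaking {
		panic("a deleted property in a request should be breaking")
	}
	if getCompatibilityForChange(AddedProperty, Response) != NonBreaking {
		panic("an added property in a response should be non-breaking")
	}
	// DeletedEndpoint sits in the ForChange table, so the direction does not matter
	if getCompatibilityForChange(DeletedEndpoint, Response) != Breaking {
		panic("a deleted endpoint should be breaking")
	}
}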


@@ -0,0 +1,22 @@
package diff
// DifferenceLocation indicates where the difference occurs
type DifferenceLocation struct {
URL string `json:"url"`
Method string `json:"method,omitempty"`
Response int `json:"response,omitempty"`
Node *Node `json:"node,omitempty"`
}
// AddNode returns a copy of this location with the leaf node added
func (dl DifferenceLocation) AddNode(node *Node) DifferenceLocation {
newLoc := dl
if newLoc.Node != nil {
newLoc.Node = newLoc.Node.Copy()
newLoc.Node.AddLeafNode(node)
} else {
newLoc.Node = node
}
return newLoc
}


@@ -0,0 +1,276 @@
package diff
import (
"bytes"
"encoding/json"
"fmt"
)
// SpecChangeCode enumerates the various types of diffs from one spec to another
type SpecChangeCode int
const (
// NoChangeDetected - the specs have no changes
NoChangeDetected SpecChangeCode = iota
// DeletedProperty - A message property has been deleted in the new spec
DeletedProperty
// AddedProperty - A message property has been added in the new spec
AddedProperty
// AddedRequiredProperty - A required message property has been added in the new spec
AddedRequiredProperty
// DeletedOptionalParam - An endpoint parameter has been deleted in the new spec
DeletedOptionalParam
// ChangedDescripton - Changed a description
ChangedDescripton
// AddedDescripton - Added a description
AddedDescripton
// DeletedDescripton - Deleted a description
DeletedDescripton
// ChangedTag - Changed a tag
ChangedTag
// AddedTag - Added a tag
AddedTag
// DeletedTag - Deleted a tag
DeletedTag
// DeletedResponse - An endpoint response has been deleted in the new spec
DeletedResponse
// DeletedEndpoint - An endpoint has been deleted in the new spec
DeletedEndpoint
// DeletedDeprecatedEndpoint - A deprecated endpoint has been deleted in the new spec
DeletedDeprecatedEndpoint
// AddedRequiredParam - A required parameter has been added in the new spec
AddedRequiredParam
// DeletedRequiredParam - A required parameter has been deleted in the new spec
DeletedRequiredParam
// ChangedRequiredToOptional - A required parameter has been made optional in the new spec
ChangedRequiredToOptional
// AddedEndpoint - An endpoint has been added in the new spec
AddedEndpoint
// WidenedType - A type has been changed to a more permissive type eg int->string
WidenedType
// NarrowedType - A type has been changed to a less permissive type eg string->int
NarrowedType
// ChangedToCompatibleType - A type has been changed to a compatible type eg password->string
ChangedToCompatibleType
// ChangedType - A type has been changed to a type whose relative compatibility cannot be determined
ChangedType
// AddedEnumValue - An enum type has had a new potential value added to it
AddedEnumValue
// DeletedEnumValue - An enum type has had an existing value removed from it
DeletedEnumValue
// AddedOptionalParam - A new optional parameter has been added to the new spec
AddedOptionalParam
// ChangedOptionalToRequiredParam - An optional parameter is now required in the new spec
ChangedOptionalToRequiredParam
// ChangedRequiredToOptionalParam - A required parameter is now optional in the new spec
ChangedRequiredToOptionalParam
// AddedResponse - An endpoint has a new response code in the new spec
AddedResponse
// AddedConsumesFormat - a new consumes format (json/xml/yaml etc) has been added in the new spec
AddedConsumesFormat
// DeletedConsumesFormat - an existing format has been removed in the new spec
DeletedConsumesFormat
// AddedProducesFormat - a new produces format (json/xml/yaml etc) has been added in the new spec
AddedProducesFormat
// DeletedProducesFormat - an existing produces format has been removed in the new spec
DeletedProducesFormat
// AddedSchemes - a new scheme has been added to the new spec
AddedSchemes
// DeletedSchemes - a scheme has been removed from the new spec
DeletedSchemes
// ChangedHostURL - the host url has been changed. If this is used in the client generation, then clients will break.
ChangedHostURL
// ChangedBasePath - the host base path has been changed. If this is used in the client generation, then clients will break.
ChangedBasePath
// AddedResponseHeader Added a header Item
AddedResponseHeader
// ChangedResponseHeader Changed a header Item
ChangedResponseHeader
// DeletedResponseHeader Deleted a header Item
DeletedResponseHeader
)
var toLongStringSpecChangeCode = map[SpecChangeCode]string{
NoChangeDetected: "No Change detected",
AddedEndpoint: "Added endpoint",
DeletedEndpoint: "Deleted endpoint",
DeletedDeprecatedEndpoint: "Deleted a deprecated endpoint",
AddedRequiredProperty: "Added required property",
DeletedProperty: "Deleted property",
ChangedDescripton: "Changed a description",
AddedDescripton: "Added a description",
DeletedDescripton: "Deleted a description",
ChangedTag: "Changed a tag",
AddedTag: "Added a tag",
DeletedTag: "Deleted a tag",
AddedProperty: "Added property",
AddedOptionalParam: "Added optional param",
AddedRequiredParam: "Added required param",
DeletedOptionalParam: "Deleted optional param",
DeletedRequiredParam: "Deleted required param",
DeletedResponse: "Deleted response",
AddedResponse: "Added response",
WidenedType: "Widened type",
NarrowedType: "Narrowed type",
ChangedType: "Changed type",
ChangedToCompatibleType: "Changed type to equivalent type",
ChangedOptionalToRequiredParam: "Changed optional param to required",
ChangedRequiredToOptionalParam: "Changed required param to optional",
AddedEnumValue: "Added possible enumeration(s)",
DeletedEnumValue: "Deleted possible enumeration(s)",
AddedConsumesFormat: "Added a consumes format",
DeletedConsumesFormat: "Deleted a consumes format",
AddedProducesFormat: "Added produces format",
DeletedProducesFormat: "Deleted produces format",
AddedSchemes: "Added schemes",
DeletedSchemes: "Deleted schemes",
ChangedHostURL: "Changed host URL",
ChangedBasePath: "Changed base path",
AddedResponseHeader: "Added response header",
ChangedResponseHeader: "Changed response header",
DeletedResponseHeader: "Deleted response header",
}
var toStringSpecChangeCode = map[SpecChangeCode]string{
AddedEndpoint: "AddedEndpoint",
NoChangeDetected: "NoChangeDetected",
DeletedEndpoint: "DeletedEndpoint",
DeletedDeprecatedEndpoint: "DeletedDeprecatedEndpoint",
AddedRequiredProperty: "AddedRequiredProperty",
DeletedProperty: "DeletedProperty",
AddedProperty: "AddedProperty",
ChangedDescripton: "ChangedDescription",
AddedDescripton: "AddedDescription",
DeletedDescripton: "DeletedDescription",
ChangedTag: "ChangedTag",
AddedTag: "AddedTag",
DeletedTag: "DeletedTag",
AddedOptionalParam: "AddedOptionalParam",
AddedRequiredParam: "AddedRequiredParam",
DeletedOptionalParam: "DeletedOptionalParam",
DeletedRequiredParam: "DeletedRequiredParam",
DeletedResponse: "DeletedResponse",
AddedResponse: "AddedResponse",
WidenedType: "WidenedType",
NarrowedType: "NarrowedType",
ChangedType: "ChangedType",
ChangedToCompatibleType: "ChangedToCompatibleType",
ChangedOptionalToRequiredParam: "ChangedOptionalToRequiredParam",
ChangedRequiredToOptionalParam: "ChangedRequiredToOptionalParam",
AddedEnumValue: "AddedEnumValue",
DeletedEnumValue: "DeletedEnumValue",
AddedConsumesFormat: "AddedConsumesFormat",
DeletedConsumesFormat: "DeletedConsumesFormat",
AddedProducesFormat: "AddedProducesFormat",
DeletedProducesFormat: "DeletedProducesFormat",
AddedSchemes: "AddedSchemes",
DeletedSchemes: "DeletedSchemes",
ChangedHostURL: "ChangedHostURL",
ChangedBasePath: "ChangedBasePath",
AddedResponseHeader: "AddedResponseHeader",
ChangedResponseHeader: "ChangedResponseHeader",
DeletedResponseHeader: "DeletedResponseHeader",
}
var toIDSpecChangeCode = map[string]SpecChangeCode{}
// Description returns an English description of this change
func (s *SpecChangeCode) Description() (result string) {
result, ok := toLongStringSpecChangeCode[*s]
if !ok {
fmt.Printf("WARNING: No description for %v", *s)
result = "UNDEFINED"
}
return
}
// MarshalJSON marshals the enum as a quoted json string
func (s *SpecChangeCode) MarshalJSON() ([]byte, error) {
return stringAsQuotedBytes(toStringSpecChangeCode[*s])
}
// UnmarshalJSON unmarshals a quoted json string to the enum value
func (s *SpecChangeCode) UnmarshalJSON(b []byte) error {
str, err := readStringFromByteStream(b)
if err != nil {
return err
}
// Note that if the string cannot be found then it will return an error to the caller.
val, ok := toIDSpecChangeCode[str]
if ok {
*s = val
} else {
return fmt.Errorf("unknown enum value. cannot unmarshal '%s'", str)
}
return nil
}
// Compatibility - whether this is a breaking or non-breaking change
type Compatibility int
const (
// Breaking this change could break existing clients
Breaking Compatibility = iota
// NonBreaking This is a backwards-compatible API change
NonBreaking
)
func (s Compatibility) String() string {
return toStringCompatibility[s]
}
var toStringCompatibility = map[Compatibility]string{
Breaking: "Breaking",
NonBreaking: "NonBreaking",
}
var toIDCompatibility = map[string]Compatibility{}
// MarshalJSON marshals the enum as a quoted json string
func (s *Compatibility) MarshalJSON() ([]byte, error) {
return stringAsQuotedBytes(toStringCompatibility[*s])
}
// UnmarshalJSON unmarshals a quoted json string to the enum value
func (s *Compatibility) UnmarshalJSON(b []byte) error {
str, err := readStringFromByteStream(b)
if err != nil {
return err
}
// Note that if the string cannot be found then it will return an error to the caller.
val, ok := toIDCompatibility[str]
if ok {
*s = val
} else {
return fmt.Errorf("unknown enum value. cannot unmarshal '%s'", str)
}
return nil
}
func stringAsQuotedBytes(str string) ([]byte, error) {
buffer := bytes.NewBufferString(`"`)
buffer.WriteString(str)
buffer.WriteString(`"`)
return buffer.Bytes(), nil
}
func readStringFromByteStream(b []byte) (string, error) {
var j string
err := json.Unmarshal(b, &j)
if err != nil {
return "", err
}
return j, nil
}
func init() {
for key, val := range toStringSpecChangeCode {
toIDSpecChangeCode[val] = key
}
for key, val := range toStringCompatibility {
toIDCompatibility[val] = key
}
}
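
A small standalone sketch (not from the vendored code) of the JSON round-trip these marshal/unmarshal helpers provide; the chosen enum values are arbitrary.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-swagger/go-swagger/cmd/swagger/commands/diff"
)

func main() {
	code := diff.DeletedResponse
	// MarshalJSON is defined on the pointer receiver, so marshal the address
	b, err := json.Marshal(&code)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // "DeletedResponse"

	// the reverse lookup map is filled by init(), so quoted names unmarshal back
	var decoded diff.SpecChangeCode
	if err := json.Unmarshal([]byte(`"AddedEndpoint"`), &decoded); err != nil {
		panic(err)
	}
	fmt.Println(decoded == diff.AddedEndpoint) // true
	fmt.Println(decoded.Description())         // Added endpoint
}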


@@ -0,0 +1,47 @@
package diff
// Node is the position of a diff in a spec
type Node struct {
Field string `json:"name,omitempty"`
TypeName string `json:"type,omitempty"`
IsArray bool `json:"is_array,omitempty"`
ChildNode *Node `json:"child,omitempty"`
}
// String std string render
func (n *Node) String() string {
name := n.Field
if n.IsArray {
name = "array[" + n.TypeName + "]"
}
if n.ChildNode != nil {
return name + "." + n.ChildNode.String()
}
if len(n.TypeName) > 0 {
return name + " : " + n.TypeName
}
return name
}
// AddLeafNode recursively walks the chain and attaches the node at the first nil child slot
func (n *Node) AddLeafNode(toAdd *Node) *Node {
if n.ChildNode == nil {
n.ChildNode = toAdd
} else {
n.ChildNode.AddLeafNode(toAdd)
}
return n
}
// Copy returns a deep copy of this node and its children
func (n Node) Copy() *Node {
newNode := n
if newNode.ChildNode != nil {
newNode.ChildNode = newNode.ChildNode.Copy()
}
return &newNode
}
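
A brief sketch (assumed, not from the vendored source) of building a node chain and stamping it onto a DifferenceLocation; the field and type names are made up.

package main

import (
	"fmt"

	"github.com/go-swagger/go-swagger/cmd/swagger/commands/diff"
)

func main() {
	// build Body.pet.name : string one leaf at a time
	root := &diff.Node{Field: "Body"}
	root.AddLeafNode(&diff.Node{Field: "pet", TypeName: "Pet"})
	root.AddLeafNode(&diff.Node{Field: "name", TypeName: "string"})
	fmt.Println(root.String()) // Body.pet.name : string

	// AddNode returns a new location with the extra leaf appended to a copy of the chain
	loc := diff.DifferenceLocation{URL: "/pets", Method: "POST", Node: root}
	deeper := loc.AddNode(&diff.Node{Field: "tag", TypeName: "string"})
	fmt.Println(deeper.Node.String()) // Body.pet.name.tag : string
}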


@@ -0,0 +1,169 @@
package diff
import (
"bytes"
"encoding/json"
"fmt"
"net/url"
"strings"
"github.com/go-openapi/spec"
)
// ArrayType const for array
var ArrayType = "array"
// Compare returns the result of analysing breaking and non breaking changes
// between two Swagger specs
func Compare(spec1, spec2 *spec.Swagger) (diffs SpecDifferences, err error) {
analyser := NewSpecAnalyser()
err = analyser.Analyse(spec1, spec2)
if err != nil {
return nil, err
}
diffs = analyser.Diffs
return
}
// PathItemOp - combines path and operation into a single keyed entity
type PathItemOp struct {
ParentPathItem *spec.PathItem `json:"pathitem"`
Operation *spec.Operation `json:"operation"`
}
// URLMethod - combines url and method into a single keyed entity
type URLMethod struct {
Path string `json:"path"`
Method string `json:"method"`
}
// DataDirection indicates the direction of change Request vs Response
type DataDirection int
const (
// Request Used for messages/param diffs in a request
Request DataDirection = iota
// Response Used for messages/param diffs in a response
Response
)
func getParams(pathParams, opParams []spec.Parameter, location string) map[string]spec.Parameter {
params := map[string]spec.Parameter{}
// add shared path params
for _, eachParam := range pathParams {
if eachParam.In == location {
params[eachParam.Name] = eachParam
}
}
// add any overridden params
for _, eachParam := range opParams {
if eachParam.In == location {
params[eachParam.Name] = eachParam
}
}
return params
}
func getNameOnlyDiffNode(forLocation string) *Node {
node := Node{
Field: forLocation,
}
return &node
}
func getSimpleSchemaDiffNode(name string, schema *spec.SimpleSchema) *Node {
node := Node{
Field: name,
}
if schema != nil {
node.TypeName, node.IsArray = getSimpleSchemaType(schema)
}
return &node
}
func getSchemaDiffNode(name string, schema *spec.Schema) *Node {
node := Node{
Field: name,
}
if schema != nil {
node.TypeName, node.IsArray = getSchemaType(&schema.SchemaProps)
}
return &node
}
func definitonFromURL(url *url.URL) string {
if url == nil {
return ""
}
fragmentParts := strings.Split(url.Fragment, "/")
numParts := len(fragmentParts)
if numParts == 0 {
return ""
}
return fragmentParts[numParts-1]
}
func getSimpleSchemaType(schema *spec.SimpleSchema) (typeName string, isArray bool) {
typeName = schema.Type
if typeName == ArrayType {
typeName, _ = getSimpleSchemaType(&schema.Items.SimpleSchema)
return typeName, true
}
return typeName, false
}
func getSchemaType(schema *spec.SchemaProps) (typeName string, isArray bool) {
refStr := definitonFromURL(schema.Ref.GetURL())
if len(refStr) > 0 {
return refStr, false
}
typeName = schema.Type[0]
if typeName == ArrayType {
typeName, _ = getSchemaType(&schema.Items.Schema.SchemaProps)
return typeName, true
}
return typeName, false
}
func primitiveTypeString(typeName, typeFormat string) string {
if typeFormat != "" {
return fmt.Sprintf("%s.%s", typeName, typeFormat)
}
return typeName
}
// TypeDiff - describes a primitive type change
type TypeDiff struct {
Change SpecChangeCode `json:"change-type,omitempty"`
Description string `json:"description,omitempty"`
FromType string `json:"from-type,omitempty"`
ToType string `json:"to-type,omitempty"`
}
// didn't use 'width' so as not to confuse with bit width
var numberWideness = map[string]int{
"number": 3,
"number.double": 3,
"double": 3,
"number.float": 2,
"float": 2,
"long": 1,
"integer.int64": 1,
"integer": 0,
"integer.int32": 0,
}
func prettyprint(b []byte) ([]byte, error) {
var out bytes.Buffer
err := json.Indent(&out, b, "", " ")
return out.Bytes(), err
}
// JSONMarshal allows the item to be correctly rendered to json
func JSONMarshal(t interface{}) ([]byte, error) {
buffer := &bytes.Buffer{}
encoder := json.NewEncoder(buffer)
encoder.SetEscapeHTML(false)
err := encoder.Encode(t)
return buffer.Bytes(), err
}
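
A standalone sketch (not part of the vendored code) contrasting JSONMarshal above with the standard encoder, to show why HTML escaping is disabled; the sample map is hypothetical.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-swagger/go-swagger/cmd/swagger/commands/diff"
)

func main() {
	v := map[string]string{"info": "query -> body"}

	std, _ := json.Marshal(v)
	fmt.Println(string(std)) // {"info":"query -\u003e body"}

	// JSONMarshal keeps the '>' readable, which matters for diff info strings
	raw, _ := diff.JSONMarshal(v)
	fmt.Print(string(raw)) // {"info":"query -> body"} plus a trailing newline from Encode
}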


@@ -0,0 +1,654 @@
package diff
import (
"fmt"
"strings"
"github.com/go-openapi/spec"
)
// StringType const for the string primitive type
const StringType = "string"
// URLMethodResponse encapsulates these three elements to act as a map key
type URLMethodResponse struct {
Path string `json:"path"`
Method string `json:"method"`
Response string `json:"response"`
}
// MarshalText - for serializing as a map key
func (p URLMethod) MarshalText() (text []byte, err error) {
return []byte(fmt.Sprintf("%s %s", p.Path, p.Method)), nil
}
// URLMethods allows iteration of endpoints based on url and method
type URLMethods map[URLMethod]*PathItemOp
// SpecAnalyser contains all the differences for a Spec
type SpecAnalyser struct {
Diffs SpecDifferences
urlMethods1 URLMethods
urlMethods2 URLMethods
Definitions1 spec.Definitions
Definitions2 spec.Definitions
AlreadyComparedDefinitions map[string]bool
}
// NewSpecAnalyser returns a new, empty SpecAnalyser
func NewSpecAnalyser() *SpecAnalyser {
return &SpecAnalyser{
Diffs: SpecDifferences{},
}
}
// Analyse the differences in two specs
func (sd *SpecAnalyser) Analyse(spec1, spec2 *spec.Swagger) error {
sd.Definitions1 = spec1.Definitions
sd.Definitions2 = spec2.Definitions
sd.urlMethods1 = getURLMethodsFor(spec1)
sd.urlMethods2 = getURLMethodsFor(spec2)
sd.analyseSpecMetadata(spec1, spec2)
sd.analyseEndpoints()
sd.analyseParams()
sd.analyseEndpointData()
sd.analyseResponseParams()
return nil
}
func (sd *SpecAnalyser) analyseSpecMetadata(spec1, spec2 *spec.Swagger) {
// breaking if it no longer consumes any formats
added, deleted, _ := FromStringArray(spec1.Consumes).DiffsTo(spec2.Consumes)
node := getNameOnlyDiffNode("Spec")
location := DifferenceLocation{Node: node}
consumesLocation := location.AddNode(getNameOnlyDiffNode("consumes"))
for _, eachAdded := range added {
sd.Diffs = sd.Diffs.addDiff(
SpecDifference{DifferenceLocation: consumesLocation, Code: AddedConsumesFormat, Compatibility: NonBreaking, DiffInfo: eachAdded})
}
for _, eachDeleted := range deleted {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: consumesLocation, Code: DeletedConsumesFormat, Compatibility: Breaking, DiffInfo: eachDeleted})
}
// // breaking if it no longer produces any formats
added, deleted, _ = FromStringArray(spec1.Produces).DiffsTo(spec2.Produces)
producesLocation := location.AddNode(getNameOnlyDiffNode("produces"))
for _, eachAdded := range added {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: producesLocation, Code: AddedProducesFormat, Compatibility: NonBreaking, DiffInfo: eachAdded})
}
for _, eachDeleted := range deleted {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: producesLocation, Code: DeletedProducesFormat, Compatibility: Breaking, DiffInfo: eachDeleted})
}
// // breaking if it no longer supports a scheme
added, deleted, _ = FromStringArray(spec1.Schemes).DiffsTo(spec2.Schemes)
schemesLocation := location.AddNode(getNameOnlyDiffNode("schemes"))
for _, eachAdded := range added {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: schemesLocation, Code: AddedSchemes, Compatibility: NonBreaking, DiffInfo: eachAdded})
}
for _, eachDeleted := range deleted {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: schemesLocation, Code: DeletedSchemes, Compatibility: Breaking, DiffInfo: eachDeleted})
}
// description changes are informational and not breaking
sd.analyseMetaDataProperty(spec1.Info.Description, spec2.Info.Description, ChangedDescripton, NonBreaking)
// a host change will break clients that hard-code the URL
sd.analyseMetaDataProperty(spec1.Host, spec2.Host, ChangedHostURL, Breaking)
// sd.Host = compareStrings(spec1.Host, spec2.Host)
// // Base Path change will break non generated clients
sd.analyseMetaDataProperty(spec1.BasePath, spec2.BasePath, ChangedBasePath, Breaking)
// TODO: what to do about security?
// Missing security scheme will break a client
// Security []map[string][]string `json:"security,omitempty"`
// Tags []Tag `json:"tags,omitempty"`
// ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"`
}
func (sd *SpecAnalyser) analyseEndpoints() {
sd.findDeletedEndpoints()
sd.findAddedEndpoints()
}
func (sd *SpecAnalyser) analyseEndpointData() {
for URLMethod, op2 := range sd.urlMethods2 {
if op1, ok := sd.urlMethods1[URLMethod]; ok {
addedTags, deletedTags, _ := FromStringArray(op1.Operation.Tags).DiffsTo(op2.Operation.Tags)
location := DifferenceLocation{URL: URLMethod.Path, Method: URLMethod.Method}
for _, eachAddedTag := range addedTags {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: AddedTag, DiffInfo: eachAddedTag})
}
for _, eachDeletedTag := range deletedTags {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: DeletedTag, DiffInfo: eachDeletedTag})
}
sd.compareDescripton(location, op1.Operation.Description, op2.Operation.Description)
}
}
}
func (sd *SpecAnalyser) analyseParams() {
locations := []string{"query", "path", "body", "header"}
for _, paramLocation := range locations {
rootNode := getNameOnlyDiffNode(strings.Title(paramLocation))
for URLMethod, op2 := range sd.urlMethods2 {
if op1, ok := sd.urlMethods1[URLMethod]; ok {
params1 := getParams(op1.ParentPathItem.Parameters, op1.Operation.Parameters, paramLocation)
params2 := getParams(op2.ParentPathItem.Parameters, op2.Operation.Parameters, paramLocation)
location := DifferenceLocation{URL: URLMethod.Path, Method: URLMethod.Method, Node: rootNode}
// detect deleted params
for paramName1, param1 := range params1 {
if _, ok := params2[paramName1]; !ok {
childLocation := location.AddNode(getSchemaDiffNode(paramName1, param1.Schema))
code := DeletedOptionalParam
if param1.Required {
code = DeletedRequiredParam
}
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: childLocation, Code: code})
}
}
// detect added changed params
for paramName2, param2 := range params2 {
//changed?
if param1, ok := params1[paramName2]; ok {
sd.compareParams(URLMethod, paramLocation, paramName2, param1, param2)
} else {
// Added
childLocation := location.AddNode(getSchemaDiffNode(paramName2, param2.Schema))
code := AddedOptionalParam
if param2.Required {
code = AddedRequiredParam
}
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: childLocation, Code: code})
}
}
}
}
}
}
func (sd *SpecAnalyser) analyseResponseParams() {
// Loop through url+methods in spec 2 - check deleted and changed
for URLMethod2, op2 := range sd.urlMethods2 {
if op1, ok := sd.urlMethods1[URLMethod2]; ok {
// compare responses for url and method
op1Responses := op1.Operation.Responses.StatusCodeResponses
op2Responses := op2.Operation.Responses.StatusCodeResponses
// deleted responses
for code1 := range op1Responses {
if _, ok := op2Responses[code1]; !ok {
location := DifferenceLocation{URL: URLMethod2.Path, Method: URLMethod2.Method, Response: code1}
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: DeletedResponse})
}
}
// Added updated Response Codes
for code2, op2Response := range op2Responses {
if op1Response, ok := op1Responses[code2]; ok {
op1Headers := op1Response.ResponseProps.Headers
headerRootNode := getNameOnlyDiffNode("Headers")
location := DifferenceLocation{URL: URLMethod2.Path, Method: URLMethod2.Method, Response: code2, Node: headerRootNode}
// Iterate Spec2 Headers looking for added and updated
for op2HeaderName, op2Header := range op2Response.ResponseProps.Headers {
if op1Header, ok := op1Headers[op2HeaderName]; ok {
sd.compareSimpleSchema(location.AddNode(getNameOnlyDiffNode(op2HeaderName)),
&op1Header.SimpleSchema,
&op2Header.SimpleSchema, false, false)
} else {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{
DifferenceLocation: location.AddNode(getNameOnlyDiffNode(op2HeaderName)),
Code: AddedResponseHeader})
}
}
for op1HeaderName := range op1Response.ResponseProps.Headers {
if _, ok := op2Response.ResponseProps.Headers[op1HeaderName]; !ok {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{
DifferenceLocation: location.AddNode(getNameOnlyDiffNode(op1HeaderName)),
Code: DeletedResponseHeader})
}
}
responseLocation := DifferenceLocation{URL: URLMethod2.Path, Method: URLMethod2.Method, Response: code2}
sd.compareDescripton(responseLocation, op1Response.Description, op2Response.Description)
if op1Response.Schema != nil {
sd.compareSchema(
DifferenceLocation{URL: URLMethod2.Path, Method: URLMethod2.Method, Response: code2},
op1Response.Schema,
op2Response.Schema, true, true)
}
} else {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{
DifferenceLocation: DifferenceLocation{URL: URLMethod2.Path, Method: URLMethod2.Method, Response: code2},
Code: AddedResponse})
}
}
}
}
}
func addTypeDiff(diffs []TypeDiff, diff TypeDiff) []TypeDiff {
if diff.Change != NoChangeDetected {
diffs = append(diffs, diff)
}
return diffs
}
// CheckToFromPrimitiveType check for diff to or from a primitive
func (sd *SpecAnalyser) CheckToFromPrimitiveType(diffs []TypeDiff, type1, type2 spec.SchemaProps) []TypeDiff {
type1IsPrimitive := len(type1.Type) > 0
type2IsPrimitive := len(type2.Type) > 0
// Primitive to Obj or Obj to Primitive
if type1IsPrimitive && !type2IsPrimitive {
return addTypeDiff(diffs, TypeDiff{Change: ChangedType, FromType: type1.Type[0], ToType: "obj"})
}
if !type1IsPrimitive && type2IsPrimitive {
return addTypeDiff(diffs, TypeDiff{Change: ChangedType, FromType: type2.Type[0], ToType: "obj"})
}
return diffs
}
// CheckToFromArrayType check for changes to or from an Array type
func (sd *SpecAnalyser) CheckToFromArrayType(diffs []TypeDiff, type1, type2 spec.SchemaProps) []TypeDiff {
// Single to Array or Array to Single
type1Array := type1.Type[0] == ArrayType
type2Array := type2.Type[0] == ArrayType
if type1Array && !type2Array {
return addTypeDiff(diffs, TypeDiff{Change: ChangedType, FromType: "obj", ToType: type2.Type[0]})
}
if !type1Array && type2Array {
return addTypeDiff(diffs, TypeDiff{Change: ChangedType, FromType: type1.Type[0], ToType: ArrayType})
}
if type1Array && type2Array {
// array
// TODO: Items??
diffs = addTypeDiff(diffs, compareIntValues("MaxItems", type1.MaxItems, type2.MaxItems, WidenedType, NarrowedType))
diffs = addTypeDiff(diffs, compareIntValues("MinItems", type1.MinItems, type2.MinItems, NarrowedType, WidenedType))
}
return diffs
}
// CheckStringTypeChanges checks for changes to or from a string type
func (sd *SpecAnalyser) CheckStringTypeChanges(diffs []TypeDiff, type1, type2 spec.SchemaProps) []TypeDiff {
// string changes
if type1.Type[0] == StringType &&
type2.Type[0] == StringType {
diffs = addTypeDiff(diffs, compareIntValues("MinLength", type1.MinLength, type2.MinLength, NarrowedType, WidenedType))
diffs = addTypeDiff(diffs, compareIntValues("MaxLength", type1.MinLength, type2.MinLength, WidenedType, NarrowedType))
if type1.Pattern != type2.Pattern {
diffs = addTypeDiff(diffs, TypeDiff{Change: ChangedType, Description: fmt.Sprintf("Pattern Changed:%s->%s", type1.Pattern, type2.Pattern)})
}
if type1.Type[0] == StringType {
if len(type1.Enum) > 0 {
enumDiffs := sd.compareEnums(type1.Enum, type2.Enum)
diffs = append(diffs, enumDiffs...)
}
}
}
return diffs
}
// CheckNumericTypeChanges checks for changes to or from a numeric type
func (sd *SpecAnalyser) CheckNumericTypeChanges(diffs []TypeDiff, type1, type2 spec.SchemaProps) []TypeDiff {
// Number
_, type1IsNumeric := numberWideness[type1.Type[0]]
_, type2IsNumeric := numberWideness[type2.Type[0]]
if type1IsNumeric && type2IsNumeric {
diffs = addTypeDiff(diffs, compareFloatValues("Maximum", type1.Maximum, type2.Maximum, WidenedType, NarrowedType))
diffs = addTypeDiff(diffs, compareFloatValues("Minimum", type1.Minimum, type2.Minimum, NarrowedType, WidenedType))
if type1.ExclusiveMaximum && !type2.ExclusiveMaximum {
diffs = addTypeDiff(diffs, TypeDiff{Change: WidenedType, Description: fmt.Sprintf("Exclusive Maximum Removed:%v->%v", type1.ExclusiveMaximum, type2.ExclusiveMaximum)})
}
if !type1.ExclusiveMaximum && type2.ExclusiveMaximum {
diffs = addTypeDiff(diffs, TypeDiff{Change: NarrowedType, Description: fmt.Sprintf("Exclusive Maximum Added:%v->%v", type1.ExclusiveMaximum, type2.ExclusiveMaximum)})
}
if type1.ExclusiveMinimum && !type2.ExclusiveMinimum {
diffs = addTypeDiff(diffs, TypeDiff{Change: WidenedType, Description: fmt.Sprintf("Exclusive Minimum Removed:%v->%v", type1.ExclusiveMaximum, type2.ExclusiveMaximum)})
}
if !type1.ExclusiveMinimum && type2.ExclusiveMinimum {
diffs = addTypeDiff(diffs, TypeDiff{Change: NarrowedType, Description: fmt.Sprintf("Exclusive Minimum Added:%v->%v", type1.ExclusiveMinimum, type2.ExclusiveMinimum)})
}
}
return diffs
}
// CompareTypes computes type specific property diffs
func (sd *SpecAnalyser) CompareTypes(type1, type2 spec.SchemaProps) []TypeDiff {
diffs := []TypeDiff{}
diffs = sd.CheckToFromPrimitiveType(diffs, type1, type2)
if len(diffs) > 0 {
return diffs
}
diffs = sd.CheckToFromArrayType(diffs, type1, type2)
if len(diffs) > 0 {
return diffs
}
// check type hierarchy change eg string -> integer = NarrowedChange
//Type
//Format
if type1.Type[0] != type2.Type[0] ||
type1.Format != type2.Format {
diff := getTypeHierarchyChange(primitiveTypeString(type1.Type[0], type1.Format), primitiveTypeString(type2.Type[0], type2.Format))
diffs = addTypeDiff(diffs, diff)
}
diffs = sd.CheckStringTypeChanges(diffs, type1, type2)
if len(diffs) > 0 {
return diffs
}
diffs = sd.CheckNumericTypeChanges(diffs, type1, type2)
if len(diffs) > 0 {
return diffs
}
return diffs
}
func (sd *SpecAnalyser) compareParams(urlMethod URLMethod, location string, name string, param1, param2 spec.Parameter) {
diffLocation := DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method}
childLocation := diffLocation.AddNode(getNameOnlyDiffNode(strings.Title(location)))
paramLocation := diffLocation.AddNode(getNameOnlyDiffNode(name))
sd.compareDescripton(paramLocation, param1.Description, param2.Description)
if param1.Schema != nil && param2.Schema != nil {
childLocation = childLocation.AddNode(getSchemaDiffNode(name, param2.Schema))
sd.compareSchema(childLocation, param1.Schema, param2.Schema, param1.Required, param2.Required)
}
diffs := sd.CompareTypes(forParam(param1), forParam(param2))
childLocation = childLocation.AddNode(getSchemaDiffNode(name, param2.Schema))
for _, eachDiff := range diffs {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{
DifferenceLocation: childLocation,
Code: eachDiff.Change,
DiffInfo: eachDiff.Description})
}
if param1.Required != param2.Required {
code := ChangedRequiredToOptionalParam
if param2.Required {
code = ChangedOptionalToRequiredParam
}
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: childLocation, Code: code})
}
}
func (sd *SpecAnalyser) compareSimpleSchema(location DifferenceLocation, schema1, schema2 *spec.SimpleSchema, required1, required2 bool) {
if schema1 == nil || schema2 == nil {
return
}
if schema1.Type == ArrayType {
refSchema1 := schema1.Items.SimpleSchema
refSchema2 := schema2.Items.SimpleSchema
childLocation := location.AddNode(getSimpleSchemaDiffNode("", schema1))
sd.compareSimpleSchema(childLocation, &refSchema1, &refSchema2, required1, required2)
return
}
if required1 != required2 {
code := AddedRequiredProperty
if required1 {
code = ChangedRequiredToOptional
}
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: code})
}
}
func (sd *SpecAnalyser) compareDescripton(location DifferenceLocation, desc1, desc2 string) {
if desc1 != desc2 {
code := ChangedDescripton
if len(desc1) > 0 {
code = DeletedDescripton
} else if len(desc2) > 0 {
code = AddedDescripton
}
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: code})
}
}
func (sd *SpecAnalyser) compareSchema(location DifferenceLocation, schema1, schema2 *spec.Schema, required1, required2 bool) {
if schema1 == nil || schema2 == nil {
return
}
sd.compareDescripton(location, schema1.Description, schema2.Description)
if len(schema1.Type) == 0 {
refSchema1, definition1 := sd.schemaFromRef(schema1, &sd.Definitions1)
refSchema2, definition2 := sd.schemaFromRef(schema2, &sd.Definitions2)
if len(definition1) > 0 {
info := fmt.Sprintf("[%s -> %s]", definition1, definition2)
if definition1 != definition2 {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location,
Code: ChangedType,
DiffInfo: info,
})
}
sd.compareSchema(location, refSchema1, refSchema2, required1, required2)
return
}
} else {
if schema1.Type[0] == ArrayType {
refSchema1, definition1 := sd.schemaFromRef(schema1.Items.Schema, &sd.Definitions1)
refSchema2, _ := sd.schemaFromRef(schema2.Items.Schema, &sd.Definitions2)
if len(definition1) > 0 {
childLocation := location.AddNode(getSchemaDiffNode("", schema1))
sd.compareSchema(childLocation, refSchema1, refSchema2, required1, required2)
return
}
}
diffs := sd.CompareTypes(schema1.SchemaProps, schema2.SchemaProps)
for _, eachTypeDiff := range diffs {
if eachTypeDiff.Change != NoChangeDetected {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: eachTypeDiff.Change, DiffInfo: eachTypeDiff.Description})
}
}
}
if required1 != required2 {
code := AddedRequiredProperty
if required1 {
code = ChangedRequiredToOptional
}
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: code})
}
requiredProps2 := sliceToStrMap(schema2.Required)
requiredProps1 := sliceToStrMap(schema1.Required)
schema1Props := sd.propertiesFor(schema1, &sd.Definitions1)
schema2Props := sd.propertiesFor(schema2, &sd.Definitions2)
// find deleted and changed properties
for eachProp1Name, eachProp1 := range schema1Props {
eachProp1 := eachProp1
_, required1 := requiredProps1[eachProp1Name]
_, required2 := requiredProps2[eachProp1Name]
childLoc := sd.addChildDiffNode(location, eachProp1Name, &eachProp1)
if eachProp2, ok := schema2Props[eachProp1Name]; ok {
sd.compareSchema(childLoc, &eachProp1, &eachProp2, required1, required2)
sd.compareDescripton(childLoc, eachProp1.Description, eachProp2.Description)
} else {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: childLoc, Code: DeletedProperty})
}
}
// find added properties
for eachProp2Name, eachProp2 := range schema2.Properties {
eachProp2 := eachProp2
if _, ok := schema1.Properties[eachProp2Name]; !ok {
childLoc := sd.addChildDiffNode(location, eachProp2Name, &eachProp2)
_, required2 := requiredProps2[eachProp2Name]
code := AddedProperty
if required2 {
code = AddedRequiredProperty
}
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: childLoc, Code: code})
}
}
}
func (sd *SpecAnalyser) addChildDiffNode(location DifferenceLocation, propName string, propSchema *spec.Schema) DifferenceLocation {
newLoc := location
if newLoc.Node != nil {
newLoc.Node = newLoc.Node.Copy()
}
childNode := sd.fromSchemaProps(propName, &propSchema.SchemaProps)
if newLoc.Node != nil {
newLoc.Node.AddLeafNode(&childNode)
} else {
newLoc.Node = &childNode
}
return newLoc
}
func (sd *SpecAnalyser) fromSchemaProps(fieldName string, props *spec.SchemaProps) Node {
node := Node{}
node.IsArray = props.Type[0] == ArrayType
if !node.IsArray {
node.TypeName = props.Type[0]
}
node.Field = fieldName
return node
}
func (sd *SpecAnalyser) compareEnums(left, right []interface{}) []TypeDiff {
diffs := []TypeDiff{}
leftStrs := []string{}
rightStrs := []string{}
for _, eachLeft := range left {
leftStrs = append(leftStrs, fmt.Sprintf("%v", eachLeft))
}
for _, eachRight := range right {
rightStrs = append(rightStrs, fmt.Sprintf("%v", eachRight))
}
added, deleted, _ := FromStringArray(leftStrs).DiffsTo(rightStrs)
if len(added) > 0 {
typeChange := strings.Join(added, ",")
diffs = append(diffs, TypeDiff{Change: AddedEnumValue, Description: typeChange})
}
if len(deleted) > 0 {
typeChange := strings.Join(deleted, ",")
diffs = append(diffs, TypeDiff{Change: DeletedEnumValue, Description: typeChange})
}
return diffs
}
func (sd *SpecAnalyser) findAddedEndpoints() {
for URLMethod := range sd.urlMethods2 {
if _, ok := sd.urlMethods1[URLMethod]; !ok {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: DifferenceLocation{URL: URLMethod.Path, Method: URLMethod.Method}, Code: AddedEndpoint})
}
}
}
func (sd *SpecAnalyser) findDeletedEndpoints() {
for eachURLMethod, operation1 := range sd.urlMethods1 {
code := DeletedEndpoint
if (operation1.ParentPathItem.Options != nil && operation1.ParentPathItem.Options.Deprecated) ||
(operation1.Operation.Deprecated) {
code = DeletedDeprecatedEndpoint
}
if _, ok := sd.urlMethods2[eachURLMethod]; !ok {
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: DifferenceLocation{URL: eachURLMethod.Path, Method: eachURLMethod.Method}, Code: code})
}
}
}
func (sd *SpecAnalyser) analyseMetaDataProperty(item1, item2 string, codeIfDiff SpecChangeCode, compatIfDiff Compatibility) {
if item1 != item2 {
diffSpec := fmt.Sprintf("%s -> %s", item1, item2)
sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: DifferenceLocation{Node: &Node{Field: "Spec Metadata"}}, Code: codeIfDiff, Compatibility: compatIfDiff, DiffInfo: diffSpec})
}
}
func (sd *SpecAnalyser) schemaFromRef(schema *spec.Schema, defns *spec.Definitions) (actualSchema *spec.Schema, definitionName string) {
ref := schema.Ref
url := ref.GetURL()
if url == nil {
return schema, ""
}
fragmentParts := strings.Split(url.Fragment, "/")
numParts := len(fragmentParts)
if numParts == 0 {
return schema, ""
}
definitionName = fragmentParts[numParts-1]
foundSchema, ok := (*defns)[definitionName]
if !ok {
return nil, definitionName
}
actualSchema = &foundSchema
return
}
func (sd *SpecAnalyser) propertiesFor(schema *spec.Schema, defns *spec.Definitions) map[string]spec.Schema {
schemaFromRef, _ := sd.schemaFromRef(schema, defns)
schema = schemaFromRef
props := map[string]spec.Schema{}
if schema.Properties != nil {
for name, prop := range schema.Properties {
prop := prop
eachProp, _ := sd.schemaFromRef(&prop, defns)
props[name] = *eachProp
}
}
for _, eachAllOf := range schema.AllOf {
eachAllOf := eachAllOf
eachAllOfActual, _ := sd.schemaFromRef(&eachAllOf, defns)
for name, prop := range eachAllOfActual.Properties {
prop := prop
eachProp, _ := sd.schemaFromRef(&prop, defns)
props[name] = *eachProp
}
}
return props
}
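
A standalone sketch (assumed, not from the vendored source) driving the analyser directly with two tiny in-memory specs; the JSON documents are hypothetical and only exercise the metadata checks above.

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/loads"
	"github.com/go-swagger/go-swagger/cmd/swagger/commands/diff"
)

const oldSpec = `{"swagger":"2.0","info":{"title":"t","version":"1"},"basePath":"/v1","schemes":["https"],"paths":{}}`
const newSpec = `{"swagger":"2.0","info":{"title":"t","version":"1"},"basePath":"/v2","schemes":["https","http"],"paths":{}}`

func main() {
	oldDoc, err := loads.Analyzed([]byte(oldSpec), "2.0")
	if err != nil {
		log.Fatal(err)
	}
	newDoc, err := loads.Analyzed([]byte(newSpec), "2.0")
	if err != nil {
		log.Fatal(err)
	}

	analyser := diff.NewSpecAnalyser()
	if err := analyser.Analyse(oldDoc.Spec(), newDoc.Spec()); err != nil {
		log.Fatal(err)
	}
	// expect a breaking base-path change and a non-breaking added scheme
	for _, d := range analyser.Diffs {
		fmt.Println(d.String())
	}
}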


@@ -0,0 +1,190 @@
package diff
import (
"fmt"
"log"
"sort"
)
// SpecDifference encapsulates the details of an individual diff in part of a spec
type SpecDifference struct {
DifferenceLocation DifferenceLocation `json:"location"`
Code SpecChangeCode `json:"code"`
Compatibility Compatibility `json:"compatibility"`
DiffInfo string `json:"info,omitempty"`
}
// SpecDifferences list of differences
type SpecDifferences []SpecDifference
// Matches returns true if the diff matches another
func (sd SpecDifference) Matches(other SpecDifference) bool {
return sd.Code == other.Code &&
sd.Compatibility == other.Compatibility &&
sd.DiffInfo == other.DiffInfo &&
equalLocations(sd.DifferenceLocation, other.DifferenceLocation)
}
func equalLocations(a, b DifferenceLocation) bool {
return a.Method == b.Method &&
a.Response == b.Response &&
a.URL == b.URL &&
equalNodes(a.Node, b.Node)
}
func equalNodes(a, b *Node) bool {
if a == nil && b == nil {
return true
}
if a == nil || b == nil {
return false
}
return a.Field == b.Field &&
a.IsArray == b.IsArray &&
a.TypeName == b.TypeName &&
equalNodes(a.ChildNode, b.ChildNode)
}
// BreakingChangeCount Calculates the breaking change count
func (sd SpecDifferences) BreakingChangeCount() int {
count := 0
for _, eachDiff := range sd {
if eachDiff.Compatibility == Breaking {
count++
}
}
return count
}
// FilterIgnores returns a copy of the list without the items in the specified ignore list
func (sd SpecDifferences) FilterIgnores(ignores SpecDifferences) SpecDifferences {
newDiffs := SpecDifferences{}
for _, eachDiff := range sd {
if !ignores.Contains(eachDiff) {
newDiffs = newDiffs.addDiff(eachDiff)
}
}
return newDiffs
}
// Contains returns true if the list contains the specified difference
func (sd SpecDifferences) Contains(diff SpecDifference) bool {
for _, eachDiff := range sd {
if eachDiff.Matches(diff) {
return true
}
}
return false
}
// String renders the difference as a human-readable string
func (sd SpecDifference) String() string {
isResponse := sd.DifferenceLocation.Response > 0
hasMethod := len(sd.DifferenceLocation.Method) > 0
hasURL := len(sd.DifferenceLocation.URL) > 0
prefix := ""
direction := ""
if isResponse {
direction = " Response"
if hasURL {
if hasMethod {
prefix = fmt.Sprintf("%s:%s -> %d", sd.DifferenceLocation.URL, sd.DifferenceLocation.Method, sd.DifferenceLocation.Response)
} else {
prefix = fmt.Sprintf("%s ", sd.DifferenceLocation.URL)
}
}
} else {
if hasURL {
if hasMethod {
direction = " Request"
prefix = fmt.Sprintf("%s:%s", sd.DifferenceLocation.URL, sd.DifferenceLocation.Method)
} else {
prefix = fmt.Sprintf("%s ", sd.DifferenceLocation.URL)
}
} else {
prefix = " Metadata"
}
}
paramOrPropertyLocation := ""
if sd.DifferenceLocation.Node != nil {
paramOrPropertyLocation = " - " + sd.DifferenceLocation.Node.String() + " "
}
optionalInfo := ""
if sd.DiffInfo != "" {
optionalInfo = fmt.Sprintf(" <%s>", sd.DiffInfo)
}
return fmt.Sprintf("%s%s%s- %s%s", prefix, direction, paramOrPropertyLocation, sd.Code.Description(), optionalInfo)
}
func (sd SpecDifferences) addDiff(diff SpecDifference) SpecDifferences {
context := Request
if diff.DifferenceLocation.Response > 0 {
context = Response
}
diff.Compatibility = getCompatibilityForChange(diff.Code, context)
return append(sd, diff)
}
// ReportCompatibility reports breaking changes and returns an error when any are present
func (sd *SpecDifferences) ReportCompatibility() error {
breakingCount := sd.BreakingChangeCount()
if breakingCount > 0 {
fmt.Printf("\nBREAKING CHANGES:\n=================\n")
sd.reportChanges(Breaking)
return fmt.Errorf("compatibility Test FAILED: %d Breaking changes detected", breakingCount)
}
log.Printf("Compatibility test OK. No breaking changes identified.")
return nil
}
func (sd SpecDifferences) reportChanges(compat Compatibility) {
toReportList := []string{}
for _, diff := range sd {
if diff.Compatibility == compat {
toReportList = append(toReportList, diff.String())
}
}
sort.Slice(toReportList, func(i, j int) bool {
return toReportList[i] < toReportList[j]
})
for _, eachDiff := range toReportList {
fmt.Println(eachDiff)
}
}
// ReportAllDiffs lists all the diffs between two specs
func (sd SpecDifferences) ReportAllDiffs(fmtJSON bool) error {
if fmtJSON {
b, err := JSONMarshal(sd)
if err != nil {
log.Fatalf("Couldn't print results: %v", err)
}
pretty, err := prettyprint(b)
if err != nil {
log.Fatalf("Couldn't print results: %v", err)
}
fmt.Println(string(pretty))
return nil
}
numDiffs := len(sd)
if numDiffs == 0 {
fmt.Println("No changes identified")
return nil
}
if numDiffs != sd.BreakingChangeCount() {
fmt.Println("NON-BREAKING CHANGES:\n=====================")
sd.reportChanges(NonBreaking)
}
return sd.ReportCompatibility()
}
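As an illustrative, in-package sketch (not part of the vendored file), the list type above is typically used like this: differences are accumulated with addDiff, an ignore list is filtered out, and the remainder is reported. The URL and change code below are arbitrary examples.
func exampleReport() error {
	diffs := SpecDifferences{}
	diffs = diffs.addDiff(SpecDifference{
		DifferenceLocation: DifferenceLocation{URL: "/pets", Method: "get"},
		Code:               DeletedEndpoint, // Compatibility is filled in by addDiff
	})
	ignores := SpecDifferences{} // e.g. loaded from an ignore file
	remaining := diffs.FilterIgnores(ignores)
	return remaining.ReportAllDiffs(false) // human-readable report; pass true for JSON
}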

View file

@ -0,0 +1,170 @@
package diff
import (
"fmt"
"github.com/go-openapi/spec"
)
func forItems(items *spec.Items) *spec.Schema {
if items == nil {
return nil
}
valids := items.CommonValidations
schema := spec.Schema{
SchemaProps: spec.SchemaProps{
Type: []string{items.SimpleSchema.Type},
Format: items.SimpleSchema.Format,
Maximum: valids.Maximum,
ExclusiveMaximum: valids.ExclusiveMaximum,
Minimum: valids.Minimum,
ExclusiveMinimum: valids.ExclusiveMinimum,
MaxLength: valids.MaxLength,
MinLength: valids.MinLength,
Pattern: valids.Pattern,
MaxItems: valids.MaxItems,
MinItems: valids.MinItems,
UniqueItems: valids.UniqueItems,
MultipleOf: valids.MultipleOf,
Enum: valids.Enum,
},
}
return &schema
}
func forParam(param spec.Parameter) spec.SchemaProps {
return spec.SchemaProps{
Type: []string{param.Type},
Format: param.Format,
Items: &spec.SchemaOrArray{Schema: forItems(param.Items)},
Maximum: param.Maximum,
ExclusiveMaximum: param.ExclusiveMaximum,
Minimum: param.Minimum,
ExclusiveMinimum: param.ExclusiveMinimum,
MaxLength: param.MaxLength,
MinLength: param.MinLength,
Pattern: param.Pattern,
MaxItems: param.MaxItems,
MinItems: param.MinItems,
UniqueItems: param.UniqueItems,
MultipleOf: param.MultipleOf,
Enum: param.Enum,
}
}
// OperationMap indexes the operations of a PathItem by HTTP method
type OperationMap map[string]*spec.Operation
func toMap(item *spec.PathItem) OperationMap {
m := make(OperationMap)
if item.Post != nil {
m["post"] = item.Post
}
if item.Get != nil {
m["get"] = item.Get
}
if item.Put != nil {
m["put"] = item.Put
}
if item.Patch != nil {
m["patch"] = item.Patch
}
if item.Head != nil {
m["head"] = item.Head
}
if item.Options != nil {
m["options"] = item.Options
}
if item.Delete != nil {
m["delete"] = item.Delete
}
return m
}
func getURLMethodsFor(spec *spec.Swagger) URLMethods {
returnURLMethods := URLMethods{}
for url, eachPath := range spec.Paths.Paths {
eachPath := eachPath
opsMap := toMap(&eachPath)
for method, op := range opsMap {
returnURLMethods[URLMethod{url, method}] = &PathItemOp{&eachPath, op}
}
}
return returnURLMethods
}
func sliceToStrMap(elements []string) map[string]bool {
elementMap := make(map[string]bool)
for _, s := range elements {
elementMap[s] = true
}
return elementMap
}
func isStringType(typeName string) bool {
return typeName == "string" || typeName == "password"
}
const objType = "obj"
func getTypeHierarchyChange(type1, type2 string) TypeDiff {
if type1 == type2 {
return TypeDiff{Change: NoChangeDetected, Description: ""}
}
fromType := type1
if fromType == "" {
fromType = objType
}
toType := type2
if toType == "" {
toType = objType
}
diffDescription := fmt.Sprintf("%s -> %s", fromType, toType)
if isStringType(type1) && !isStringType(type2) {
return TypeDiff{Change: NarrowedType, Description: diffDescription}
}
if !isStringType(type1) && isStringType(type2) {
return TypeDiff{Change: WidenedType, Description: diffDescription}
}
type1Wideness, type1IsNumeric := numberWideness[type1]
type2Wideness, type2IsNumeric := numberWideness[type2]
if type1IsNumeric && type2IsNumeric {
if type1Wideness == type2Wideness {
return TypeDiff{Change: ChangedToCompatibleType, Description: diffDescription}
}
if type1Wideness > type2Wideness {
return TypeDiff{Change: NarrowedType, Description: diffDescription}
}
if type1Wideness < type2Wideness {
return TypeDiff{Change: WidenedType, Description: diffDescription}
}
}
return TypeDiff{Change: ChangedType, Description: diffDescription}
}
func compareFloatValues(fieldName string, val1 *float64, val2 *float64, ifGreaterCode SpecChangeCode, ifLessCode SpecChangeCode) TypeDiff {
if val1 != nil && val2 != nil {
if *val2 > *val1 {
return TypeDiff{Change: ifGreaterCode, Description: fmt.Sprintf("%s %f->%f", fieldName, *val1, *val2)}
}
if *val2 < *val1 {
return TypeDiff{Change: ifLessCode, Description: fmt.Sprintf("%s %f->%f", fieldName, *val1, *val2)}
}
}
return TypeDiff{Change: NoChangeDetected, Description: ""}
}
func compareIntValues(fieldName string, val1 *int64, val2 *int64, ifGreaterCode SpecChangeCode, ifLessCode SpecChangeCode) TypeDiff {
if val1 != nil && val2 != nil {
if *val2 > *val1 {
return TypeDiff{Change: ifGreaterCode, Description: fmt.Sprintf("%s %d->%d", fieldName, *val1, *val2)}
}
if *val2 < *val1 {
return TypeDiff{Change: ifLessCode, Description: fmt.Sprintf("%s %d->%d", fieldName, *val1, *val2)}
}
}
return TypeDiff{Change: NoChangeDetected, Description: ""}
}
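A rough in-package illustration of the helpers above (assumptions: both "integer" and "number" appear in the numberWideness map, with "number" the wider of the two; the change codes passed to compareFloatValues are the caller's choice):
func exampleTypeDiffs() {
	// a string-ish type changing to a non-string type is reported as narrowing
	d1 := getTypeHierarchyChange("string", "integer")
	fmt.Println(d1.Change, d1.Description) // NarrowedType "string -> integer"

	// within numeric types the wideness table decides the direction (assumed: number wider than integer)
	d2 := getTypeHierarchyChange("integer", "number")
	fmt.Println(d2.Change) // WidenedType under the assumption above

	// numeric limits: val2 > val1 yields whichever ifGreaterCode the caller chose
	oldMax, newMax := 10.0, 100.0
	d3 := compareFloatValues("maximum", &oldMax, &newMax, WidenedType, NarrowedType)
	fmt.Println(d3.Description) // "maximum 10.000000->100.000000"
}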

View file

@ -0,0 +1,73 @@
package commands
import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"github.com/go-openapi/loads"
"github.com/go-openapi/spec"
"github.com/go-openapi/swag"
flags "github.com/jessevdk/go-flags"
yaml "gopkg.in/yaml.v2"
)
// ExpandSpec is a command that expands the $refs in a swagger document.
//
// There are no specific options for this expansion.
type ExpandSpec struct {
Compact bool `long:"compact" description:"applies to JSON formatted specs. When present, doesn't prettify the json"`
Output flags.Filename `long:"output" short:"o" description:"the file to write to"`
Format string `long:"format" description:"the format for the spec document" default:"json" choice:"yaml" choice:"json"`
}
// Execute expands the spec
func (c *ExpandSpec) Execute(args []string) error {
if len(args) != 1 {
return errors.New("expand command requires the single swagger document url to be specified")
}
swaggerDoc := args[0]
specDoc, err := loads.Spec(swaggerDoc)
if err != nil {
return err
}
exp, err := specDoc.Expanded()
if err != nil {
return err
}
return writeToFile(exp.Spec(), !c.Compact, c.Format, string(c.Output))
}
func writeToFile(swspec *spec.Swagger, pretty bool, format string, output string) error {
var b []byte
var err error
asJSON := format == "json"
if pretty && asJSON {
b, err = json.MarshalIndent(swspec, "", " ")
} else if asJSON {
b, err = json.Marshal(swspec)
} else {
// marshals as YAML
b, err = json.Marshal(swspec)
if err == nil {
d, ery := swag.BytesToYAMLDoc(b)
if ery != nil {
return ery
}
b, err = yaml.Marshal(d)
}
}
if err != nil {
return err
}
if output == "" {
fmt.Println(string(b))
return nil
}
return ioutil.WriteFile(output, b, 0644)
}
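For reference, the same expansion can be done programmatically with the packages this command already imports; a minimal sketch, assuming a hypothetical spec path:
func exampleExpand() error {
	doc, err := loads.Spec("./swagger.yml") // hypothetical path
	if err != nil {
		return err
	}
	expanded, err := doc.Expanded() // resolves all $refs, as Execute does above
	if err != nil {
		return err
	}
	b, err := json.MarshalIndent(expanded.Spec(), "", "  ")
	if err != nil {
		return err
	}
	fmt.Println(string(b))
	return nil
}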

View file

@ -0,0 +1,48 @@
package commands
import (
"errors"
"github.com/go-openapi/analysis"
"github.com/go-openapi/loads"
"github.com/go-swagger/go-swagger/cmd/swagger/commands/generate"
flags "github.com/jessevdk/go-flags"
)
// FlattenSpec is a command that flattens a swagger document
// which expands the remote references in a spec and moves inline schemas to definitions;
// after flattening there are no complex inline schemas left
type FlattenSpec struct {
Compact bool `long:"compact" description:"applies to JSON formatted specs. When present, doesn't prettify the json"`
Output flags.Filename `long:"output" short:"o" description:"the file to write to"`
Format string `long:"format" description:"the format for the spec document" default:"json" choice:"yaml" choice:"json"`
generate.FlattenCmdOptions
}
// Execute flattens the spec
func (c *FlattenSpec) Execute(args []string) error {
if len(args) != 1 {
return errors.New("flatten command requires the single swagger document url to be specified")
}
swaggerDoc := args[0]
specDoc, err := loads.Spec(swaggerDoc)
if err != nil {
return err
}
flattenOpts := c.FlattenCmdOptions.SetFlattenOptions(&analysis.FlattenOpts{
// defaults
Minimal: true,
Verbose: true,
Expand: false,
RemoveUnused: false,
})
flattenOpts.BasePath = specDoc.SpecFilePath()
flattenOpts.Spec = analysis.New(specDoc.Spec())
if err := analysis.Flatten(*flattenOpts); err != nil {
return err
}
return writeToFile(specDoc.Spec(), !c.Compact, c.Format, string(c.Output))
}
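A minimal programmatic sketch of the same flattening step, using only calls that appear in Execute above; the defaults mirror the command's defaults and the spec path is hypothetical:
func exampleFlatten() error {
	specDoc, err := loads.Spec("./swagger.yml") // hypothetical path
	if err != nil {
		return err
	}
	opts := analysis.FlattenOpts{
		Minimal:      true, // same defaults as the command
		Verbose:      true,
		Expand:       false,
		RemoveUnused: false,
		BasePath:     specDoc.SpecFilePath(),
		Spec:         analysis.New(specDoc.Spec()),
	}
	return analysis.Flatten(opts)
}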

View file

@ -0,0 +1,27 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package commands
import "github.com/go-swagger/go-swagger/cmd/swagger/commands/generate"
// Generate command to group all generator commands together
type Generate struct {
Model *generate.Model `command:"model"`
Operation *generate.Operation `command:"operation"`
Support *generate.Support `command:"support"`
Server *generate.Server `command:"server"`
Spec *generate.SpecFile `command:"spec"`
Client *generate.Client `command:"client"`
}

View file

@ -0,0 +1,92 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package generate
import (
"log"
"github.com/go-swagger/go-swagger/generator"
)
// Client the command to generate a swagger client
type Client struct {
shared
Name string `long:"name" short:"A" description:"the name of the application, defaults to a mangled value of info.title"`
Operations []string `long:"operation" short:"O" description:"specify an operation to include, repeat for multiple"`
Tags []string `long:"tags" description:"the tags to include, if not specified defaults to all"`
Principal string `long:"principal" short:"P" description:"the model to use for the security principal"`
Models []string `long:"model" short:"M" description:"specify a model to include, repeat for multiple"`
DefaultScheme string `long:"default-scheme" description:"the default scheme for this client" default:"http"`
DefaultProduces string `long:"default-produces" description:"the default mime type that API operations produce" default:"application/json"`
SkipModels bool `long:"skip-models" description:"no models will be generated when this flag is specified"`
SkipOperations bool `long:"skip-operations" description:"no operations will be generated when this flag is specified"`
DumpData bool `long:"dump-data" description:"when present dumps the json for the template generator instead of generating files"`
SkipValidation bool `long:"skip-validation" description:"skips validation of spec prior to generation"`
}
func (c *Client) getOpts() (*generator.GenOpts, error) {
return &generator.GenOpts{
Spec: string(c.Spec),
Target: string(c.Target),
APIPackage: c.APIPackage,
ModelPackage: c.ModelPackage,
ServerPackage: c.ServerPackage,
ClientPackage: c.ClientPackage,
Principal: c.Principal,
DefaultScheme: c.DefaultScheme,
DefaultProduces: c.DefaultProduces,
IncludeModel: !c.SkipModels,
IncludeValidator: !c.SkipModels,
IncludeHandler: !c.SkipOperations,
IncludeParameters: !c.SkipOperations,
IncludeResponses: !c.SkipOperations,
ValidateSpec: !c.SkipValidation,
Tags: c.Tags,
IncludeSupport: true,
Template: c.Template,
TemplateDir: string(c.TemplateDir),
DumpData: c.DumpData,
ExistingModels: c.ExistingModels,
IsClient: true,
}, nil
}
func (c *Client) getShared() *shared {
return &c.shared
}
func (c *Client) generate(opts *generator.GenOpts) error {
return generator.GenerateClient(c.Name, c.Models, c.Operations, opts)
}
func (c *Client) log(rp string) {
log.Printf(`Generation completed!
For this generation to compile you need to have some packages in your GOPATH:
* github.com/go-openapi/errors
* github.com/go-openapi/runtime
* github.com/go-openapi/runtime/client
* github.com/go-openapi/strfmt
You can get these now with: go get -u -f %s/...
`, rp)
}
// Execute runs this command
func (c *Client) Execute(args []string) error {
return createSwagger(c)
}

View file

@ -0,0 +1,16 @@
package generate
import (
"github.com/go-swagger/go-swagger/generator"
)
// contribOptionsOverride gives contributed templates the ability to override the options if they need to
func contribOptionsOverride(opts *generator.GenOpts) {
switch opts.Template {
case "stratoscale":
// Stratoscale template needs to regenerate the configureapi on every run.
opts.RegenerateConfigureAPI = true
// It also does not use the main.go
opts.IncludeMain = false
}
}

View file

@ -0,0 +1,53 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package generate
import (
"errors"
"log"
)
// Model the generate model file command
type Model struct {
shared
Name []string `long:"name" short:"n" description:"the model to generate"`
NoStruct bool `long:"skip-struct" description:"when present will not generate the model struct"`
DumpData bool `long:"dump-data" description:"when present dumps the json for the template generator instead of generating files"`
SkipValidation bool `long:"skip-validation" description:"skips validation of spec prior to generation"`
}
// Execute generates a model file
func (m *Model) Execute(args []string) error {
if m.DumpData && len(m.Name) > 1 {
return errors.New("only 1 model at a time is supported for dumping data")
}
if m.ExistingModels != "" {
log.Println("warning: Ignoring existing-models flag when generating models.")
}
s := &Server{
shared: m.shared,
Models: m.Name,
DumpData: m.DumpData,
ExcludeMain: true,
ExcludeSpec: true,
SkipSupport: true,
SkipOperations: true,
SkipModels: m.NoStruct,
SkipValidation: m.SkipValidation,
}
return s.Execute(args)
}

View file

@ -0,0 +1,87 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package generate
import (
"errors"
"log"
"github.com/go-swagger/go-swagger/generator"
)
// Operation the generate operation files command
type Operation struct {
shared
Name []string `long:"name" short:"n" required:"true" description:"the operations to generate, repeat for multiple"`
Tags []string `long:"tags" description:"the tags to include, if not specified defaults to all"`
Principal string `short:"P" long:"principal" description:"the model to use for the security principal"`
DefaultScheme string `long:"default-scheme" description:"the default scheme for this API" default:"http"`
NoHandler bool `long:"skip-handler" description:"when present will not generate an operation handler"`
NoStruct bool `long:"skip-parameters" description:"when present will not generate the parameter model struct"`
NoResponses bool `long:"skip-responses" description:"when present will not generate the response model struct"`
NoURLBuilder bool `long:"skip-url-builder" description:"when present will not generate a URL builder"`
DumpData bool `long:"dump-data" description:"when present dumps the json for the template generator instead of generating files"`
SkipValidation bool `long:"skip-validation" description:"skips validation of spec prior to generation"`
}
func (o *Operation) getOpts() (*generator.GenOpts, error) {
return &generator.GenOpts{
Spec: string(o.Spec),
Target: string(o.Target),
APIPackage: o.APIPackage,
ModelPackage: o.ModelPackage,
ServerPackage: o.ServerPackage,
ClientPackage: o.ClientPackage,
Principal: o.Principal,
DumpData: o.DumpData,
DefaultScheme: o.DefaultScheme,
TemplateDir: string(o.TemplateDir),
IncludeHandler: !o.NoHandler,
IncludeResponses: !o.NoResponses,
IncludeParameters: !o.NoStruct,
IncludeURLBuilder: !o.NoURLBuilder,
Tags: o.Tags,
ValidateSpec: !o.SkipValidation,
}, nil
}
func (o *Operation) getShared() *shared {
return &o.shared
}
func (o *Operation) generate(opts *generator.GenOpts) error {
return generator.GenerateServerOperation(o.Name, opts)
}
func (o *Operation) log(rp string) {
log.Printf(`Generation completed!
For this generation to compile you need to have some packages in your GOPATH:
* github.com/go-openapi/runtime
You can get these now with: go get -u -f %s/...
`, rp)
}
// Execute generates a model file
func (o *Operation) Execute(args []string) error {
if o.DumpData && len(o.Name) > 1 {
return errors.New("only 1 operation at a time is supported for dumping data")
}
return createSwagger(o)
}

View file

@ -0,0 +1,119 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package generate
import (
"log"
"strings"
"github.com/go-swagger/go-swagger/generator"
)
// Server the command to generate an entire server application
type Server struct {
shared
Name string `long:"name" short:"A" description:"the name of the application, defaults to a mangled value of info.title"`
Operations []string `long:"operation" short:"O" description:"specify an operation to include, repeat for multiple"`
Tags []string `long:"tags" description:"the tags to include, if not specified defaults to all"`
Principal string `long:"principal" short:"P" description:"the model to use for the security principal"`
DefaultScheme string `long:"default-scheme" description:"the default scheme for this API" default:"http"`
Models []string `long:"model" short:"M" description:"specify a model to include, repeat for multiple"`
SkipModels bool `long:"skip-models" description:"no models will be generated when this flag is specified"`
SkipOperations bool `long:"skip-operations" description:"no operations will be generated when this flag is specified"`
SkipSupport bool `long:"skip-support" description:"no supporting files will be generated when this flag is specified"`
ExcludeMain bool `long:"exclude-main" description:"exclude main function, so just generate the library"`
ExcludeSpec bool `long:"exclude-spec" description:"don't embed the swagger specification"`
WithContext bool `long:"with-context" description:"handlers get a context as first arg (deprecated)"`
DumpData bool `long:"dump-data" description:"when present dumps the json for the template generator instead of generating files"`
FlagStrategy string `long:"flag-strategy" description:"the strategy to provide flags for the server" default:"go-flags" choice:"go-flags" choice:"pflag"`
CompatibilityMode string `long:"compatibility-mode" description:"the compatibility mode for the tls server" default:"modern" choice:"modern" choice:"intermediate"`
SkipValidation bool `long:"skip-validation" description:"skips validation of spec prior to generation"`
RegenerateConfigureAPI bool `long:"regenerate-configureapi" description:"Force regeneration of configureapi.go"`
KeepSpecOrder bool `long:"keep-spec-order" description:"Keep schema properties order identical to spec file"`
StrictAdditionalProperties bool `long:"strict-additional-properties" description:"disallow extra properties when additionalProperties is set to false"`
}
func (s *Server) getOpts() (*generator.GenOpts, error) {
// warning: deprecation
if s.WithContext {
log.Printf("warning: deprecated option --with-context is ignored")
}
return &generator.GenOpts{
Spec: string(s.Spec),
Target: string(s.Target),
APIPackage: s.APIPackage,
ModelPackage: s.ModelPackage,
ServerPackage: s.ServerPackage,
ClientPackage: s.ClientPackage,
Principal: s.Principal,
DefaultScheme: s.DefaultScheme,
IncludeModel: !s.SkipModels,
IncludeValidator: !s.SkipModels,
IncludeHandler: !s.SkipOperations,
IncludeParameters: !s.SkipOperations,
IncludeResponses: !s.SkipOperations,
IncludeURLBuilder: !s.SkipOperations,
IncludeMain: !s.ExcludeMain,
IncludeSupport: !s.SkipSupport,
PropertiesSpecOrder: s.KeepSpecOrder,
ValidateSpec: !s.SkipValidation,
ExcludeSpec: s.ExcludeSpec,
StrictAdditionalProperties: s.StrictAdditionalProperties,
Template: s.Template,
RegenerateConfigureAPI: s.RegenerateConfigureAPI,
TemplateDir: string(s.TemplateDir),
DumpData: s.DumpData,
Models: s.Models,
Operations: s.Operations,
Tags: s.Tags,
Name: s.Name,
FlagStrategy: s.FlagStrategy,
CompatibilityMode: s.CompatibilityMode,
ExistingModels: s.ExistingModels,
}, nil
}
func (s *Server) getShared() *shared {
return &s.shared
}
func (s *Server) generate(opts *generator.GenOpts) error {
return generator.GenerateServer(s.Name, s.Models, s.Operations, opts)
}
func (s *Server) log(rp string) {
var flagsPackage string
if strings.HasPrefix(s.FlagStrategy, "pflag") {
flagsPackage = "github.com/spf13/pflag"
} else {
flagsPackage = "github.com/jessevdk/go-flags"
}
log.Printf(`Generation completed!
For this generation to compile you need to have some packages in your GOPATH:
* github.com/go-openapi/runtime
* `+flagsPackage+`
You can get these now with: go get -u -f %s/...
`, rp)
}
// Execute runs this command
func (s *Server) Execute(args []string) error {
return createSwagger(s)
}

View file

@ -0,0 +1,213 @@
package generate
import (
"io/ioutil"
"log"
"os"
"path/filepath"
"github.com/go-openapi/analysis"
"github.com/go-openapi/swag"
"github.com/go-swagger/go-swagger/generator"
flags "github.com/jessevdk/go-flags"
"github.com/spf13/viper"
)
// FlattenCmdOptions determines options to the flatten spec preprocessing
type FlattenCmdOptions struct {
WithExpand bool `long:"with-expand" description:"expands all $ref's in spec prior to generation (shorthand to --with-flatten=expand)"`
WithFlatten []string `long:"with-flatten" description:"flattens all $ref's in spec prior to generation" choice:"minimal" choice:"full" choice:"expand" choice:"verbose" choice:"noverbose" choice:"remove-unused" default:"minimal" default:"verbose"`
}
// SetFlattenOptions builds flatten options from command line args
func (f *FlattenCmdOptions) SetFlattenOptions(dflt *analysis.FlattenOpts) (res *analysis.FlattenOpts) {
res = &analysis.FlattenOpts{}
if dflt != nil {
*res = *dflt
}
if f == nil {
return
}
verboseIsSet := false
minimalIsSet := false
//removeUnusedIsSet := false
expandIsSet := false
if f.WithExpand {
res.Expand = true
expandIsSet = true
}
for _, opt := range f.WithFlatten {
if opt == "verbose" {
res.Verbose = true
verboseIsSet = true
}
if opt == "noverbose" && !verboseIsSet {
// verbose flag takes precedence
res.Verbose = false
verboseIsSet = true
}
if opt == "remove-unused" {
res.RemoveUnused = true
//removeUnusedIsSet = true
}
if opt == "expand" {
res.Expand = true
expandIsSet = true
}
if opt == "full" && !minimalIsSet && !expandIsSet {
// minimal flag takes precedence
res.Minimal = false
minimalIsSet = true
}
if opt == "minimal" && !expandIsSet {
// expand flag takes precedence
res.Minimal = true
minimalIsSet = true
}
}
return
}
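An in-package sketch (illustrative only) of how the CLI values above translate into analysis options, with defaults matching the ones used by createSwagger later in this file: a "full" entry switches Minimal off, and "remove-unused" enables pruning.
func exampleFlattenCmdOptions() {
	f := FlattenCmdOptions{WithFlatten: []string{"full", "remove-unused"}}
	res := f.SetFlattenOptions(&analysis.FlattenOpts{Minimal: true, Verbose: true})
	// res.Minimal == false (the "full" entry wins), res.RemoveUnused == true,
	// res.Verbose == true (carried over from the supplied defaults)
	_ = res
}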
type shared struct {
Spec flags.Filename `long:"spec" short:"f" description:"the spec file to use (default swagger.{json,yml,yaml})"`
APIPackage string `long:"api-package" short:"a" description:"the package to save the operations" default:"operations"`
ModelPackage string `long:"model-package" short:"m" description:"the package to save the models" default:"models"`
ServerPackage string `long:"server-package" short:"s" description:"the package to save the server specific code" default:"restapi"`
ClientPackage string `long:"client-package" short:"c" description:"the package to save the client specific code" default:"client"`
Target flags.Filename `long:"target" short:"t" default:"./" description:"the base directory for generating the files"`
Template string `long:"template" description:"Load contributed templates" choice:"stratoscale"`
TemplateDir flags.Filename `long:"template-dir" short:"T" description:"alternative template override directory"`
ConfigFile flags.Filename `long:"config-file" short:"C" description:"configuration file to use for overriding template options"`
CopyrightFile flags.Filename `long:"copyright-file" short:"r" description:"copyright file used to add copyright header"`
ExistingModels string `long:"existing-models" description:"use pre-generated models e.g. github.com/foobar/model"`
AdditionalInitialisms []string `long:"additional-initialism" description:"consecutive capitals that should be considered initialisms"`
FlattenCmdOptions
}
type sharedCommand interface {
getOpts() (*generator.GenOpts, error)
getShared() *shared
getConfigFile() flags.Filename
getAdditionalInitialisms() []string
generate(*generator.GenOpts) error
log(string)
}
func (s *shared) getConfigFile() flags.Filename {
return s.ConfigFile
}
func (s *shared) getAdditionalInitialisms() []string {
return s.AdditionalInitialisms
}
func (s *shared) setCopyright() (string, error) {
var copyrightstr string
copyrightfile := string(s.CopyrightFile)
if copyrightfile != "" {
//Read the Copyright from file path in opts
bytebuffer, err := ioutil.ReadFile(copyrightfile)
if err != nil {
return "", err
}
copyrightstr = string(bytebuffer)
} else {
copyrightstr = ""
}
return copyrightstr, nil
}
func createSwagger(s sharedCommand) error {
cfg, erc := readConfig(string(s.getConfigFile()))
if erc != nil {
return erc
}
setDebug(cfg)
opts, ero := s.getOpts()
if ero != nil {
return ero
}
if opts.Template != "" {
contribOptionsOverride(opts)
}
if err := opts.EnsureDefaults(); err != nil {
return err
}
if err := configureOptsFromConfig(cfg, opts); err != nil {
return err
}
swag.AddInitialisms(s.getAdditionalInitialisms()...)
if sharedOpts := s.getShared(); sharedOpts != nil {
// process shared options
opts.FlattenOpts = sharedOpts.FlattenCmdOptions.SetFlattenOptions(opts.FlattenOpts)
copyrightStr, erc := sharedOpts.setCopyright()
if erc != nil {
return erc
}
opts.Copyright = copyrightStr
}
if err := s.generate(opts); err != nil {
return err
}
basepath, era := filepath.Abs(".")
if era != nil {
return era
}
targetAbs, err := filepath.Abs(opts.Target)
if err != nil {
return err
}
rp, err := filepath.Rel(basepath, targetAbs)
if err != nil {
return err
}
s.log(rp)
return nil
}
func readConfig(filename string) (*viper.Viper, error) {
if filename == "" {
return nil, nil
}
abspath, err := filepath.Abs(filename)
if err != nil {
return nil, err
}
log.Println("trying to read config from", abspath)
return generator.ReadConfig(abspath)
}
func configureOptsFromConfig(cfg *viper.Viper, opts *generator.GenOpts) error {
if cfg == nil {
return nil
}
var def generator.LanguageDefinition
if err := cfg.Unmarshal(&def); err != nil {
return err
}
return def.ConfigureOpts(opts)
}
func setDebug(cfg *viper.Viper) {
if os.Getenv("DEBUG") != "" || os.Getenv("SWAGGER_DEBUG") != "" {
if cfg != nil {
cfg.Debug()
} else {
log.Println("NO config read")
}
}
}

View file

@ -0,0 +1,125 @@
//+build !go1.11
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package generate
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
"strings"
"github.com/go-openapi/loads"
"github.com/go-openapi/spec"
"github.com/go-swagger/go-swagger/scan"
"github.com/jessevdk/go-flags"
"gopkg.in/yaml.v2"
)
// SpecFile command to generate a swagger spec from a go application
type SpecFile struct {
BasePath string `long:"base-path" short:"b" description:"the base path to use" default:"."`
BuildTags string `long:"tags" short:"t" description:"build tags" default:""`
ScanModels bool `long:"scan-models" short:"m" description:"includes models that were annotated with 'swagger:model'"`
Compact bool `long:"compact" description:"when present, doesn't prettify the json"`
Output flags.Filename `long:"output" short:"o" description:"the file to write to"`
Input flags.Filename `long:"input" short:"i" description:"the file to use as input"`
Include []string `long:"include" short:"c" description:"include packages matching pattern"`
Exclude []string `long:"exclude" short:"x" description:"exclude packages matching pattern"`
IncludeTags []string `long:"include-tag" short:"" description:"include routes having specified tags (can be specified many times)"`
ExcludeTags []string `long:"exclude-tag" short:"" description:"exclude routes having specified tags (can be specified many times)"`
}
// Execute runs this command
func (s *SpecFile) Execute(args []string) error {
input, err := loadSpec(string(s.Input))
if err != nil {
return err
}
var opts scan.Opts
opts.BasePath = s.BasePath
opts.Input = input
opts.ScanModels = s.ScanModels
opts.BuildTags = s.BuildTags
opts.Include = s.Include
opts.Exclude = s.Exclude
opts.IncludeTags = s.IncludeTags
opts.ExcludeTags = s.ExcludeTags
swspec, err := scan.Application(opts)
if err != nil {
return err
}
return writeToFile(swspec, !s.Compact, string(s.Output))
}
func loadSpec(input string) (*spec.Swagger, error) {
if fi, err := os.Stat(input); err == nil {
if fi.IsDir() {
return nil, fmt.Errorf("expected %q to be a file not a directory", input)
}
sp, err := loads.Spec(input)
if err != nil {
return nil, err
}
return sp.Spec(), nil
}
return nil, nil
}
func writeToFile(swspec *spec.Swagger, pretty bool, output string) error {
var b []byte
var err error
if strings.HasSuffix(output, "yml") || strings.HasSuffix(output, "yaml") {
b, err = marshalToYAMLFormat(swspec)
} else {
b, err = marshalToJSONFormat(swspec, pretty)
}
if err != nil {
return err
}
if output == "" {
fmt.Println(string(b))
return nil
}
return ioutil.WriteFile(output, b, 0644)
}
func marshalToJSONFormat(swspec *spec.Swagger, pretty bool) ([]byte, error) {
if pretty {
return json.MarshalIndent(swspec, "", " ")
}
return json.Marshal(swspec)
}
func marshalToYAMLFormat(swspec *spec.Swagger) ([]byte, error) {
b, err := json.Marshal(swspec)
if err != nil {
return nil, err
}
var jsonObj interface{}
if err := yaml.Unmarshal(b, &jsonObj); err != nil {
return nil, err
}
return yaml.Marshal(jsonObj)
}

View file

@ -0,0 +1,119 @@
// +build go1.11
package generate
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
"strings"
"github.com/go-swagger/go-swagger/codescan"
"github.com/go-openapi/loads"
"github.com/go-openapi/spec"
"github.com/jessevdk/go-flags"
"gopkg.in/yaml.v2"
)
// SpecFile command to generate a swagger spec from a go application
type SpecFile struct {
WorkDir string `long:"work-dir" short:"w" description:"the base path to use" default:"."`
BuildTags string `long:"tags" short:"t" description:"build tags" default:""`
ScanModels bool `long:"scan-models" short:"m" description:"includes models that were annotated with 'swagger:model'"`
Compact bool `long:"compact" description:"when present, doesn't prettify the json"`
Output flags.Filename `long:"output" short:"o" description:"the file to write to"`
Input flags.Filename `long:"input" short:"i" description:"the file to use as input"`
Include []string `long:"include" short:"c" description:"include packages matching pattern"`
Exclude []string `long:"exclude" short:"x" description:"exclude packages matching pattern"`
IncludeTags []string `long:"include-tag" short:"" description:"include routes having specified tags (can be specified many times)"`
ExcludeTags []string `long:"exclude-tag" short:"" description:"exclude routes having specified tags (can be specified many times)"`
ExcludeDeps bool `long:"exclude-deps" short:"" description:"exclude all dependencies of project"`
}
// Execute runs this command
func (s *SpecFile) Execute(args []string) error {
if len(args) == 0 { // by default consider all the paths under the working directory
args = []string{"./..."}
}
input, err := loadSpec(string(s.Input))
if err != nil {
return err
}
var opts codescan.Options
opts.Packages = args
opts.WorkDir = s.WorkDir
opts.InputSpec = input
opts.ScanModels = s.ScanModels
opts.BuildTags = s.BuildTags
opts.Include = s.Include
opts.Exclude = s.Exclude
opts.IncludeTags = s.IncludeTags
opts.ExcludeTags = s.ExcludeTags
opts.ExcludeDeps = s.ExcludeDeps
swspec, err := codescan.Run(&opts)
if err != nil {
return err
}
return writeToFile(swspec, !s.Compact, string(s.Output))
}
func loadSpec(input string) (*spec.Swagger, error) {
if fi, err := os.Stat(input); err == nil {
if fi.IsDir() {
return nil, fmt.Errorf("expected %q to be a file not a directory", input)
}
sp, err := loads.Spec(input)
if err != nil {
return nil, err
}
return sp.Spec(), nil
}
return nil, nil
}
func writeToFile(swspec *spec.Swagger, pretty bool, output string) error {
var b []byte
var err error
if strings.HasSuffix(output, "yml") || strings.HasSuffix(output, "yaml") {
b, err = marshalToYAMLFormat(swspec)
} else {
b, err = marshalToJSONFormat(swspec, pretty)
}
if err != nil {
return err
}
if output == "" {
fmt.Println(string(b))
return nil
}
return ioutil.WriteFile(output, b, 0644)
}
func marshalToJSONFormat(swspec *spec.Swagger, pretty bool) ([]byte, error) {
if pretty {
return json.MarshalIndent(swspec, "", " ")
}
return json.Marshal(swspec)
}
func marshalToYAMLFormat(swspec *spec.Swagger) ([]byte, error) {
b, err := json.Marshal(swspec)
if err != nil {
return nil, err
}
var jsonObj interface{}
if err := yaml.Unmarshal(b, &jsonObj); err != nil {
return nil, err
}
return yaml.Marshal(jsonObj)
}

View file

@ -0,0 +1,76 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package generate
import (
"log"
"github.com/go-swagger/go-swagger/generator"
)
// Support generates the supporting files
type Support struct {
shared
Name string `long:"name" short:"A" description:"the name of the application, defaults to a mangled value of info.title"`
Operations []string `long:"operation" short:"O" description:"specify an operation to include, repeat for multiple"`
Principal string `long:"principal" description:"the model to use for the security principal"`
Models []string `long:"model" short:"M" description:"specify a model to include, repeat for multiple"`
DumpData bool `long:"dump-data" description:"when present dumps the json for the template generator instead of generating files"`
DefaultScheme string `long:"default-scheme" description:"the default scheme for this API" default:"http"`
}
func (s *Support) getOpts() (*generator.GenOpts, error) {
return &generator.GenOpts{
Spec: string(s.Spec),
Target: string(s.Target),
APIPackage: s.APIPackage,
ModelPackage: s.ModelPackage,
ServerPackage: s.ServerPackage,
ClientPackage: s.ClientPackage,
Principal: s.Principal,
DumpData: s.DumpData,
DefaultScheme: s.DefaultScheme,
Template: s.Template,
TemplateDir: string(s.TemplateDir),
}, nil
}
func (s *Support) getShared() *shared {
return &s.shared
}
func (s *Support) generate(opts *generator.GenOpts) error {
return generator.GenerateSupport(s.Name, nil, nil, opts)
}
func (s *Support) log(rp string) {
log.Printf(`Generation completed!
For this generation to compile you need to have some packages in your vendor or GOPATH:
* github.com/go-openapi/runtime
* github.com/asaskevich/govalidator
* github.com/jessevdk/go-flags
* golang.org/x/net/context/ctxhttp
You can get these now with: go get -u -f %s/...
`, rp)
}
// Execute generates the supporting files file
func (s *Support) Execute(args []string) error {
return createSwagger(s)
}

View file

@ -0,0 +1,13 @@
package commands
import "github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd"
// InitCmd is a command namespace for initializing things like a swagger spec.
type InitCmd struct {
Model *initcmd.Spec `command:"spec"`
}
// Execute provides default empty implementation
func (i *InitCmd) Execute(args []string) error {
return nil
}

View file

@ -0,0 +1,111 @@
package initcmd
import (
"encoding/json"
"fmt"
"log"
"os"
"path/filepath"
"gopkg.in/yaml.v2"
"github.com/go-openapi/spec"
"github.com/go-openapi/swag"
)
// Spec a command struct for initializing a new swagger application.
type Spec struct {
Format string `long:"format" description:"the format for the spec document" default:"yaml" choice:"yaml" choice:"json"`
Title string `long:"title" description:"the title of the API"`
Description string `long:"description" description:"the description of the API"`
Version string `long:"version" description:"the version of the API" default:"0.1.0"`
Terms string `long:"terms" description:"the terms of services"`
Consumes []string `long:"consumes" description:"add a content type to the global consumes definitions, can repeat" default:"application/json"`
Produces []string `long:"produces" description:"add a content type to the global produces definitions, can repeat" default:"application/json"`
Schemes []string `long:"scheme" description:"add a scheme to the global schemes definition, can repeat" default:"http"`
Contact struct {
Name string `long:"contact.name" description:"name of the primary contact for the API"`
URL string `long:"contact.url" description:"url of the primary contact for the API"`
Email string `long:"contact.email" description:"email of the primary contact for the API"`
}
License struct {
Name string `long:"license.name" description:"name of the license for the API"`
URL string `long:"license.url" description:"url of the license for the API"`
}
}
// Execute this command
func (s *Spec) Execute(args []string) error {
targetPath := "."
if len(args) > 0 {
targetPath = args[0]
}
realPath, err := filepath.Abs(targetPath)
if err != nil {
return err
}
var file *os.File
switch s.Format {
case "json":
file, err = os.Create(filepath.Join(realPath, "swagger.json"))
if err != nil {
return err
}
case "yaml", "yml":
file, err = os.Create(filepath.Join(realPath, "swagger.yml"))
if err != nil {
return err
}
default:
return fmt.Errorf("invalid format: %s", s.Format)
}
defer file.Close()
log.Println("creating specification document in", filepath.Join(targetPath, file.Name()))
var doc spec.Swagger
info := new(spec.Info)
doc.Info = info
doc.Swagger = "2.0"
doc.Paths = new(spec.Paths)
doc.Definitions = make(spec.Definitions)
info.Title = s.Title
if info.Title == "" {
info.Title = swag.ToHumanNameTitle(filepath.Base(realPath))
}
info.Description = s.Description
info.Version = s.Version
info.TermsOfService = s.Terms
if s.Contact.Name != "" || s.Contact.Email != "" || s.Contact.URL != "" {
var contact spec.ContactInfo
contact.Name = s.Contact.Name
contact.Email = s.Contact.Email
contact.URL = s.Contact.URL
info.Contact = &contact
}
if s.License.Name != "" || s.License.URL != "" {
var license spec.License
license.Name = s.License.Name
license.URL = s.License.URL
info.License = &license
}
doc.Consumes = append(doc.Consumes, s.Consumes...)
doc.Produces = append(doc.Produces, s.Produces...)
doc.Schemes = append(doc.Schemes, s.Schemes...)
if s.Format == "json" {
enc := json.NewEncoder(file)
return enc.Encode(doc)
}
b, err := yaml.Marshal(swag.ToDynamicJSON(doc))
if err != nil {
return err
}
if _, err := file.Write(b); err != nil {
return err
}
return nil
}

View file

@ -0,0 +1,103 @@
package commands
import (
"errors"
"io"
"log"
"os"
"github.com/go-openapi/analysis"
"github.com/go-openapi/loads"
"github.com/go-openapi/spec"
flags "github.com/jessevdk/go-flags"
)
const (
// Output messages
nothingToDo = "Nothing to do. Need some swagger files to merge.\nUSAGE: swagger mixin [-c <expected#Collisions>] <primary-swagger-file> <mixin-swagger-file>..."
)
// MixinSpec holds command line flag definitions specific to the mixin
// command. The flags are defined using struct field tags with the
// "github.com/jessevdk/go-flags" format.
type MixinSpec struct {
ExpectedCollisionCount uint `short:"c" description:"expected # of rejected mixin paths, defs, etc due to existing key. Non-zero exit if does not match actual."`
Compact bool `long:"compact" description:"applies to JSON formatted specs. When present, doesn't prettify the json"`
Output flags.Filename `long:"output" short:"o" description:"the file to write to"`
Format string `long:"format" description:"the format for the spec document" default:"json" choice:"yaml" choice:"json"`
}
// Execute runs the mixin command which merges Swagger 2.0 specs into
// one spec
//
// Use cases include adding independently versioned metadata APIs to
// application APIs for microservices.
//
// Typically, multiple APIs on the same service instance are not a
// problem for client generation as you can create more than one
// client to the service from the same calling process (one for each
// API). However, merging clients can improve clarity of client code
// by having a single client to given service vs several.
//
// Server skeleton generation, i.e. generating the model & marshaling
// code, http server instance etc. from Swagger, becomes easier with a
// merged spec for some tools & target-languages. Server code
// generation tools that natively support hosting multiple specs in
// one server process will not need this tool.
func (c *MixinSpec) Execute(args []string) error {
if len(args) < 2 {
return errors.New(nothingToDo)
}
log.Printf("args[0] = %v\n", args[0])
log.Printf("args[1:] = %v\n", args[1:])
collisions, err := c.MixinFiles(args[0], args[1:], os.Stdout)
for _, warn := range collisions {
log.Println(warn)
}
if err != nil {
return err
}
if len(collisions) != int(c.ExpectedCollisionCount) {
if len(collisions) != 0 {
// use bash $? to get actual # collisions
// (but has to be non-zero)
os.Exit(len(collisions))
}
os.Exit(254)
}
return nil
}
// MixinFiles is a convenience function for Mixin that reads the given
// swagger files, adds the mixins to primary, calls
// FixEmptyResponseDescriptions on the primary, and writes the primary
// with mixins to the given writer in JSON. Returns the warning
// messages for collisions that occurred during mixin process and any
// error.
func (c *MixinSpec) MixinFiles(primaryFile string, mixinFiles []string, w io.Writer) ([]string, error) {
primaryDoc, err := loads.Spec(primaryFile)
if err != nil {
return nil, err
}
primary := primaryDoc.Spec()
var mixins []*spec.Swagger
for _, mixinFile := range mixinFiles {
mixin, lerr := loads.Spec(mixinFile)
if lerr != nil {
return nil, lerr
}
mixins = append(mixins, mixin.Spec())
}
collisions := analysis.Mixin(primary, mixins...)
analysis.FixEmptyResponseDescriptions(primary)
return collisions, writeToFile(primary, !c.Compact, c.Format, string(c.Output))
}
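A minimal programmatic sketch of the same merge, using only calls that appear in MixinFiles above; the file names are hypothetical:
func exampleMixin() error {
	primaryDoc, err := loads.Spec("primary.yml") // hypothetical
	if err != nil {
		return err
	}
	mixinDoc, err := loads.Spec("metadata.yml") // hypothetical
	if err != nil {
		return err
	}
	primary := primaryDoc.Spec()
	collisions := analysis.Mixin(primary, mixinDoc.Spec())
	analysis.FixEmptyResponseDescriptions(primary)
	for _, warn := range collisions {
		log.Println(warn) // keys already present in the primary spec were skipped
	}
	return writeToFile(primary, true, "json", "") // empty output path prints to stdout
}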

View file

@ -0,0 +1,107 @@
package commands
import (
"encoding/json"
"errors"
"fmt"
"log"
"net"
"net/http"
"net/url"
"path"
"strconv"
"github.com/go-openapi/loads"
"github.com/go-openapi/runtime/middleware"
"github.com/go-openapi/swag"
"github.com/gorilla/handlers"
"github.com/toqueteos/webbrowser"
)
// ServeCmd to serve a swagger spec with docs ui
type ServeCmd struct {
BasePath string `long:"base-path" description:"the base path to serve the spec and UI at"`
Flavor string `short:"F" long:"flavor" description:"the flavor of docs, can be swagger or redoc" default:"redoc" choice:"redoc" choice:"swagger"`
DocURL string `long:"doc-url" description:"override the url which takes a url query param to render the doc ui"`
NoOpen bool `long:"no-open" description:"when present won't open the browser to show the url"`
NoUI bool `long:"no-ui" description:"when present, only the swagger spec will be served"`
Port int `long:"port" short:"p" description:"the port to serve this site" env:"PORT"`
Host string `long:"host" description:"the interface to serve this site, defaults to 0.0.0.0" env:"HOST"`
}
// Execute the serve command
func (s *ServeCmd) Execute(args []string) error {
if len(args) == 0 {
return errors.New("specify the spec to serve as argument to the serve command")
}
specDoc, err := loads.Spec(args[0])
if err != nil {
return err
}
b, err := json.MarshalIndent(specDoc.Spec(), "", " ")
if err != nil {
return err
}
basePath := s.BasePath
if basePath == "" {
basePath = "/"
}
listener, err := net.Listen("tcp4", net.JoinHostPort(s.Host, strconv.Itoa(s.Port)))
if err != nil {
return err
}
sh, sp, err := swag.SplitHostPort(listener.Addr().String())
if err != nil {
return err
}
if sh == "0.0.0.0" {
sh = "localhost"
}
visit := s.DocURL
handler := http.NotFoundHandler()
if !s.NoUI {
if s.Flavor == "redoc" {
handler = middleware.Redoc(middleware.RedocOpts{
BasePath: basePath,
SpecURL: path.Join(basePath, "swagger.json"),
Path: "docs",
}, handler)
visit = fmt.Sprintf("http://%s:%d%s", sh, sp, path.Join(basePath, "docs"))
} else if visit != "" || s.Flavor == "swagger" {
if visit == "" {
visit = "http://petstore.swagger.io/"
}
u, err := url.Parse(visit)
if err != nil {
return err
}
q := u.Query()
q.Add("url", fmt.Sprintf("http://%s:%d%s", sh, sp, path.Join(basePath, "swagger.json")))
u.RawQuery = q.Encode()
visit = u.String()
}
}
handler = handlers.CORS()(middleware.Spec(basePath, b, handler))
errFuture := make(chan error)
go func() {
docServer := new(http.Server)
docServer.SetKeepAlivesEnabled(true)
docServer.Handler = handler
errFuture <- docServer.Serve(listener)
}()
if !s.NoOpen && !s.NoUI {
err := webbrowser.Open(visit)
if err != nil {
return err
}
}
log.Println("serving docs at", visit)
return <-errFuture
}

View file

@ -0,0 +1,83 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package commands
import (
"errors"
"fmt"
"log"
"github.com/go-openapi/loads"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/validate"
)
const (
// Output messages
missingArgMsg = "The validate command requires the swagger document url to be specified"
validSpecMsg = "\nThe swagger spec at %q is valid against swagger specification %s\n"
invalidSpecMsg = "\nThe swagger spec at %q is invalid against swagger specification %s.\nSee errors below:\n"
warningSpecMsg = "\nThe swagger spec at %q showed up some valid but possibly unwanted constructs."
)
// ValidateSpec is a command that validates a swagger document
// against the swagger specification
type ValidateSpec struct {
// SchemaURL string `long:"schema" description:"The schema url to use" default:"http://swagger.io/v2/schema.json"`
SkipWarnings bool `long:"skip-warnings" description:"when present will not show warnings upon validation"`
StopOnError bool `long:"stop-on-error" description:"when present will not continue validation after critical errors are found"`
}
// Execute validates the spec
func (c *ValidateSpec) Execute(args []string) error {
if len(args) == 0 {
return errors.New(missingArgMsg)
}
swaggerDoc := args[0]
specDoc, err := loads.Spec(swaggerDoc)
if err != nil {
return err
}
// Attempts to report about all errors
validate.SetContinueOnErrors(!c.StopOnError)
v := validate.NewSpecValidator(specDoc.Schema(), strfmt.Default)
result, _ := v.Validate(specDoc) // returns fully detailed result with errors and warnings
if result.IsValid() {
log.Printf(validSpecMsg, swaggerDoc, specDoc.Version())
}
if result.HasWarnings() {
log.Printf(warningSpecMsg, swaggerDoc)
if !c.SkipWarnings {
log.Printf("See warnings below:\n")
for _, desc := range result.Warnings {
log.Printf("- WARNING: %s\n", desc.Error())
}
}
}
if result.HasErrors() {
str := fmt.Sprintf(invalidSpecMsg, swaggerDoc, specDoc.Version())
for _, desc := range result.Errors {
str += fmt.Sprintf("- %s\n", desc.Error())
}
return errors.New(str)
}
return nil
}
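The same validation can be run programmatically; this minimal sketch mirrors Execute above, with a hypothetical spec path:
func exampleValidate() error {
	specDoc, err := loads.Spec("./swagger.yml") // hypothetical path
	if err != nil {
		return err
	}
	validate.SetContinueOnErrors(true) // report as many errors as possible
	v := validate.NewSpecValidator(specDoc.Schema(), strfmt.Default)
	result, _ := v.Validate(specDoc)
	if !result.IsValid() {
		for _, desc := range result.Errors {
			log.Printf("- %s\n", desc.Error())
		}
		return errors.New("spec is invalid")
	}
	return nil
}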

View file

@ -0,0 +1,26 @@
package commands
import "fmt"
var (
// Version for the swagger command
Version string
// Commit for the swagger command
Commit string
)
// PrintVersion the command
type PrintVersion struct {
}
// Execute this command
func (p *PrintVersion) Execute(args []string) error {
if Version == "" {
fmt.Println("dev")
return nil
}
fmt.Println("version:", Version)
fmt.Println("commit:", Commit)
return nil
}

View file

@ -0,0 +1,148 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"io/ioutil"
"log"
"os"
"github.com/go-openapi/loads"
"github.com/go-openapi/loads/fmts"
"github.com/go-swagger/go-swagger/cmd/swagger/commands"
flags "github.com/jessevdk/go-flags"
)
func init() {
loads.AddLoader(fmts.YAMLMatcher, fmts.YAMLDoc)
}
var (
// Debug is true when the SWAGGER_DEBUG env var is not empty
Debug = os.Getenv("SWAGGER_DEBUG") != ""
)
var opts struct {
// General options applicable to all commands
Quiet func() `long:"quiet" short:"q" description:"silence logs"`
LogFile func(string) `long:"log-output" description:"redirect logs to file" value-name:"LOG-FILE"`
// Version bool `long:"version" short:"v" description:"print the version of the command"`
}
func main() {
// TODO: reactivate 'defer catch all' once product is stable
// Recovering from internal panics
// Stack may be printed in Debug mode
// Need import "runtime/debug".
//defer func() {
// r := recover()
// if r != nil {
// log.Printf("Fatal error:", r)
// if Debug {
// debug.PrintStack()
// }
// os.Exit(1)
// }
//}()
parser := flags.NewParser(&opts, flags.Default)
parser.ShortDescription = "helps you keep your API well described"
parser.LongDescription = `
Swagger tries to support you as best as possible when building APIs.
It aims to represent the contract of your API with a language agnostic description of your application in json or yaml.
`
_, err := parser.AddCommand("validate", "validate the swagger document", "validate the provided swagger document against a swagger spec", &commands.ValidateSpec{})
if err != nil {
log.Fatal(err)
}
_, err = parser.AddCommand("init", "initialize a spec document", "initialize a swagger spec document", &commands.InitCmd{})
if err != nil {
log.Fatal(err)
}
_, err = parser.AddCommand("version", "print the version", "print the version of the swagger command", &commands.PrintVersion{})
if err != nil {
log.Fatal(err)
}
_, err = parser.AddCommand("serve", "serve spec and docs", "serve a spec and swagger or redoc documentation ui", &commands.ServeCmd{})
if err != nil {
log.Fatal(err)
}
_, err = parser.AddCommand("expand", "expand $ref fields in a swagger spec", "expands the $refs in a swagger document to inline schemas", &commands.ExpandSpec{})
if err != nil {
log.Fatal(err)
}
_, err = parser.AddCommand("flatten", "flattens a swagger document", "expand the remote references in a spec and move inline schemas to definitions, after flattening there are no complex inlined anymore", &commands.FlattenSpec{})
if err != nil {
log.Fatal(err)
}
_, err = parser.AddCommand("mixin", "merge swagger documents", "merge additional specs into first/primary spec by copying their paths and definitions", &commands.MixinSpec{})
if err != nil {
log.Fatal(err)
}
_, err = parser.AddCommand("diff", "diff swagger documents", "diff specs showing which changes will break existing clients", &commands.DiffCommand{})
if err != nil {
log.Fatal(err)
}
genpar, err := parser.AddCommand("generate", "generate go code", "generate go code for the swagger spec file", &commands.Generate{})
if err != nil {
log.Fatalln(err)
}
for _, cmd := range genpar.Commands() {
switch cmd.Name {
case "spec":
cmd.ShortDescription = "generate a swagger spec document from a go application"
cmd.LongDescription = cmd.ShortDescription
case "client":
cmd.ShortDescription = "generate all the files for a client library"
cmd.LongDescription = cmd.ShortDescription
case "server":
cmd.ShortDescription = "generate all the files for a server application"
cmd.LongDescription = cmd.ShortDescription
case "model":
cmd.ShortDescription = "generate one or more models from the swagger spec"
cmd.LongDescription = cmd.ShortDescription
case "support":
cmd.ShortDescription = "generate supporting files like the main function and the api builder"
cmd.LongDescription = cmd.ShortDescription
case "operation":
cmd.ShortDescription = "generate one or more server operations from the swagger spec"
cmd.LongDescription = cmd.ShortDescription
}
}
opts.Quiet = func() {
log.SetOutput(ioutil.Discard)
}
opts.LogFile = func(logfile string) {
f, err := os.OpenFile(logfile, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0666)
if err != nil {
log.Fatalf("cannot write to file %s: %v", logfile, err)
}
log.SetOutput(f)
}
if _, err := parser.Parse(); err != nil {
os.Exit(1)
}
}


@ -0,0 +1,599 @@
package codescan
import (
"fmt"
"go/ast"
"go/types"
"log"
"os"
"strings"
"github.com/go-openapi/swag"
"golang.org/x/tools/go/packages"
"github.com/go-openapi/spec"
)
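// pkgLoadMode asks go/packages for names, files, imports, dependencies, type
// information and syntax trees: everything the scanner below relies on.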
const pkgLoadMode = packages.NeedName | packages.NeedFiles | packages.NeedImports | packages.NeedDeps | packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo
func safeConvert(str string) bool {
b, err := swag.ConvertBool(str)
if err != nil {
return false
}
return b
}
// Debug is true when the process is run with a DEBUG env var that parses as true (for example DEBUG=1)
var Debug = safeConvert(os.Getenv("DEBUG"))
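// node is a bit set recording which kinds of swagger annotations were seen in a file.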
type node uint32
const (
metaNode node = 1 << iota
routeNode
operationNode
modelNode
parametersNode
responseNode
)
// Options for the scanner
type Options struct {
Packages []string
InputSpec *spec.Swagger
ScanModels bool
WorkDir string
BuildTags string
ExcludeDeps bool
Include []string
Exclude []string
IncludeTags []string
ExcludeTags []string
}
type scanCtx struct {
pkgs []*packages.Package
app *typeIndex
}
func sliceToSet(names []string) map[string]bool {
result := make(map[string]bool)
for _, v := range names {
result[v] = true
}
return result
}
// Run the scanner to produce a spec with the options provided
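//
// A minimal sketch of a call site, for illustration only (the package pattern
// and option values below are made up, not taken from this repository):
//
//	swspec, err := Run(&Options{
//		Packages:   []string{"./..."},
//		ScanModels: true,
//	})
//	if err != nil {
//		log.Fatal(err)
//	}
//	_ = swspec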
func Run(opts *Options) (*spec.Swagger, error) {
sc, err := newScanCtx(opts)
if err != nil {
return nil, err
}
sb := newSpecBuilder(opts.InputSpec, sc, opts.ScanModels)
return sb.Build()
}
func newScanCtx(opts *Options) (*scanCtx, error) {
cfg := &packages.Config{
Dir: opts.WorkDir,
Mode: pkgLoadMode,
Tests: false,
}
if opts.BuildTags != "" {
cfg.BuildFlags = []string{"-tags", opts.BuildTags}
}
pkgs, err := packages.Load(cfg, opts.Packages...)
if err != nil {
return nil, err
}
app, err := newTypeIndex(pkgs, opts.ExcludeDeps,
sliceToSet(opts.IncludeTags), sliceToSet(opts.ExcludeTags),
opts.Include, opts.Exclude)
if err != nil {
return nil, err
}
return &scanCtx{
pkgs: pkgs,
app: app,
}, nil
}
type entityDecl struct {
Comments *ast.CommentGroup
Type *types.Named
Ident *ast.Ident
Spec *ast.TypeSpec
File *ast.File
Pkg *packages.Package
hasModelAnnotation bool
hasResponseAnnotation bool
hasParameterAnnotation bool
}
func (d *entityDecl) Names() (name, goName string) {
goName = d.Ident.Name
name = goName
if d.Comments == nil {
return
}
DECLS:
for _, cmt := range d.Comments.List {
for _, ln := range strings.Split(cmt.Text, "\n") {
matches := rxModelOverride.FindStringSubmatch(ln)
if len(matches) > 0 {
d.hasModelAnnotation = true
}
if len(matches) > 1 && len(matches[1]) > 0 {
name = matches[1]
break DECLS
}
}
}
return
}
func (d *entityDecl) ResponseNames() (name, goName string) {
goName = d.Ident.Name
name = goName
if d.Comments == nil {
return
}
DECLS:
for _, cmt := range d.Comments.List {
for _, ln := range strings.Split(cmt.Text, "\n") {
matches := rxResponseOverride.FindStringSubmatch(ln)
if len(matches) > 0 {
d.hasResponseAnnotation = true
}
if len(matches) > 1 && len(matches[1]) > 0 {
name = matches[1]
break DECLS
}
}
}
return
}
func (d *entityDecl) OperationIDS() (result []string) {
if d == nil || d.Comments == nil {
return nil
}
for _, cmt := range d.Comments.List {
for _, ln := range strings.Split(cmt.Text, "\n") {
matches := rxParametersOverride.FindStringSubmatch(ln)
if len(matches) > 0 {
d.hasParameterAnnotation = true
}
if len(matches) > 1 && len(matches[1]) > 0 {
for _, pt := range strings.Split(matches[1], " ") {
tr := strings.TrimSpace(pt)
if len(tr) > 0 {
result = append(result, tr)
}
}
}
}
}
return
}
func (d *entityDecl) HasModelAnnotation() bool {
if d.hasModelAnnotation {
return true
}
if d.Comments == nil {
return false
}
for _, cmt := range d.Comments.List {
for _, ln := range strings.Split(cmt.Text, "\n") {
matches := rxModelOverride.FindStringSubmatch(ln)
if len(matches) > 0 {
d.hasModelAnnotation = true
return true
}
}
}
return false
}
func (d *entityDecl) HasResponseAnnotation() bool {
if d.hasResponseAnnotation {
return true
}
if d.Comments == nil {
return false
}
for _, cmt := range d.Comments.List {
for _, ln := range strings.Split(cmt.Text, "\n") {
matches := rxResponseOverride.FindStringSubmatch(ln)
if len(matches) > 0 {
d.hasResponseAnnotation = true
return true
}
}
}
return false
}
func (d *entityDecl) HasParameterAnnotation() bool {
if d.hasParameterAnnotation {
return true
}
if d.Comments == nil {
return false
}
for _, cmt := range d.Comments.List {
for _, ln := range strings.Split(cmt.Text, "\n") {
matches := rxParametersOverride.FindStringSubmatch(ln)
if len(matches) > 0 {
d.hasParameterAnnotation = true
return true
}
}
}
return false
}
func (s *scanCtx) FindDecl(pkgPath, name string) (*entityDecl, bool) {
if pkg, ok := s.app.AllPackages[pkgPath]; ok {
for _, file := range pkg.Syntax {
for _, d := range file.Decls {
gd, ok := d.(*ast.GenDecl)
if !ok {
continue
}
for _, sp := range gd.Specs {
if ts, ok := sp.(*ast.TypeSpec); ok && ts.Name.Name == name {
def, ok := pkg.TypesInfo.Defs[ts.Name]
if !ok {
debugLog("couldn't find type info for %s", ts.Name)
continue
}
nt, isNamed := def.Type().(*types.Named)
if !isNamed {
debugLog("%s is not a named type but a %T", ts.Name, def.Type())
continue
}
decl := &entityDecl{
Comments: gd.Doc,
Type: nt,
Ident: ts.Name,
Spec: ts,
File: file,
Pkg: pkg,
}
return decl, true
}
}
}
}
}
return nil, false
}
func (s *scanCtx) FindModel(pkgPath, name string) (*entityDecl, bool) {
for _, cand := range s.app.Models {
ct := cand.Type.Obj()
if ct.Name() == name && ct.Pkg().Path() == pkgPath {
return cand, true
}
}
if decl, found := s.FindDecl(pkgPath, name); found {
s.app.Models[decl.Ident] = decl
return decl, true
}
return nil, false
}
func (s *scanCtx) PkgForPath(pkgPath string) (*packages.Package, bool) {
v, ok := s.app.AllPackages[pkgPath]
return v, ok
}
func (s *scanCtx) DeclForType(t types.Type) (*entityDecl, bool) {
switch tpe := t.(type) {
case *types.Pointer:
return s.DeclForType(tpe.Elem())
case *types.Named:
return s.FindDecl(tpe.Obj().Pkg().Path(), tpe.Obj().Name())
default:
log.Printf("unknown type to find the package for [%T]: %s", t, t.String())
return nil, false
}
}
func (s *scanCtx) PkgForType(t types.Type) (*packages.Package, bool) {
switch tpe := t.(type) {
// case *types.Basic:
// case *types.Struct:
// case *types.Pointer:
// case *types.Interface:
// case *types.Array:
// case *types.Slice:
// case *types.Map:
case *types.Named:
v, ok := s.app.AllPackages[tpe.Obj().Pkg().Path()]
return v, ok
default:
log.Printf("unknown type to find the package for [%T]: %s", t, t.String())
return nil, false
}
}
func (s *scanCtx) FindComments(pkg *packages.Package, name string) (*ast.CommentGroup, bool) {
for _, f := range pkg.Syntax {
for _, d := range f.Decls {
gd, ok := d.(*ast.GenDecl)
if !ok {
continue
}
for _, s := range gd.Specs {
if ts, ok := s.(*ast.TypeSpec); ok {
if ts.Name.Name == name {
return gd.Doc, true
}
}
}
}
}
return nil, false
}
func newTypeIndex(pkgs []*packages.Package,
excludeDeps bool, includeTags, excludeTags map[string]bool,
includePkgs, excludePkgs []string) (*typeIndex, error) {
ac := &typeIndex{
AllPackages: make(map[string]*packages.Package),
Models: make(map[*ast.Ident]*entityDecl),
excludeDeps: excludeDeps,
includeTags: includeTags,
excludeTags: excludeTags,
includePkgs: includePkgs,
excludePkgs: excludePkgs,
}
if err := ac.build(pkgs); err != nil {
return nil, err
}
return ac, nil
}
type typeIndex struct {
AllPackages map[string]*packages.Package
Models map[*ast.Ident]*entityDecl
Meta []metaSection
Routes []parsedPathContent
Operations []parsedPathContent
Parameters []*entityDecl
Responses []*entityDecl
excludeDeps bool
includeTags map[string]bool
excludeTags map[string]bool
includePkgs []string
excludePkgs []string
}
func (a *typeIndex) build(pkgs []*packages.Package) error {
for _, pkg := range pkgs {
if _, known := a.AllPackages[pkg.PkgPath]; known {
continue
}
a.AllPackages[pkg.PkgPath] = pkg
if err := a.processPackage(pkg); err != nil {
return err
}
if err := a.walkImports(pkg); err != nil {
return err
}
}
return nil
}
func (a *typeIndex) processPackage(pkg *packages.Package) error {
if !shouldAcceptPkg(pkg.PkgPath, a.includePkgs, a.excludePkgs) {
debugLog("package %s is ignored due to rules", pkg.Name)
return nil
}
for _, file := range pkg.Syntax {
n, err := a.detectNodes(file)
if err != nil {
return err
}
if n&metaNode != 0 {
a.Meta = append(a.Meta, metaSection{Comments: file.Doc})
}
if n&operationNode != 0 {
for _, cmts := range file.Comments {
pp := parsePathAnnotation(rxOperation, cmts.List)
if pp.Method == "" {
continue // not a valid operation
}
if !shouldAcceptTag(pp.Tags, a.includeTags, a.excludeTags) {
debugLog("operation %s %s is ignored due to tag rules", pp.Method, pp.Path)
continue
}
a.Operations = append(a.Operations, pp)
}
}
if n&routeNode != 0 {
for _, cmts := range file.Comments {
pp := parsePathAnnotation(rxRoute, cmts.List)
if pp.Method == "" {
continue // not a valid operation
}
if !shouldAcceptTag(pp.Tags, a.includeTags, a.excludeTags) {
debugLog("operation %s %s is ignored due to tag rules", pp.Method, pp.Path)
continue
}
a.Routes = append(a.Routes, pp)
}
}
for _, dt := range file.Decls {
switch fd := dt.(type) {
case *ast.BadDecl:
continue
case *ast.FuncDecl:
if fd.Body == nil {
continue
}
for _, stmt := range fd.Body.List {
if dstm, ok := stmt.(*ast.DeclStmt); ok {
if gd, isGD := dstm.Decl.(*ast.GenDecl); isGD {
a.processDecl(pkg, file, n, gd)
}
}
}
case *ast.GenDecl:
a.processDecl(pkg, file, n, fd)
}
}
}
return nil
}
func (a *typeIndex) processDecl(pkg *packages.Package, file *ast.File, n node, gd *ast.GenDecl) {
for _, sp := range gd.Specs {
switch ts := sp.(type) {
case *ast.ValueSpec:
debugLog("saw value spec: %v", ts.Names)
return
case *ast.ImportSpec:
debugLog("saw import spec: %v", ts.Name)
return
case *ast.TypeSpec:
def, ok := pkg.TypesInfo.Defs[ts.Name]
if !ok {
debugLog("couldn't find type info for %s", ts.Name)
continue
}
nt, isNamed := def.Type().(*types.Named)
if !isNamed {
debugLog("%s is not a named type but a %T", ts.Name, def.Type())
continue
}
decl := &entityDecl{
Comments: gd.Doc,
Type: nt,
Ident: ts.Name,
Spec: ts,
File: file,
Pkg: pkg,
}
key := ts.Name
if n&modelNode != 0 && decl.HasModelAnnotation() {
a.Models[key] = decl
}
if n&parametersNode != 0 && decl.HasParameterAnnotation() {
a.Parameters = append(a.Parameters, decl)
}
if n&responseNode != 0 && decl.HasResponseAnnotation() {
a.Responses = append(a.Responses, decl)
}
}
}
}
func (a *typeIndex) walkImports(pkg *packages.Package) error {
if a.excludeDeps {
return nil
}
for k := range pkg.Imports {
if _, known := a.AllPackages[k]; known {
continue
}
pk := pkg.Imports[k]
a.AllPackages[pk.PkgPath] = pk
if err := a.processPackage(pk); err != nil {
return err
}
if err := a.walkImports(pk); err != nil {
return err
}
}
return nil
}
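// detectNodes scans the comments of a file and reports, as a bit set, which
// kinds of swagger annotations it contains.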
func (a *typeIndex) detectNodes(file *ast.File) (node, error) {
var n node
for _, comments := range file.Comments {
var seenStruct string
for _, cline := range comments.List {
if cline == nil {
continue
}
matches := rxSwaggerAnnotation.FindStringSubmatch(cline.Text)
if len(matches) < 2 {
continue
}
switch matches[1] {
case "route":
n |= routeNode
case "operation":
n |= operationNode
case "model":
n |= modelNode
if seenStruct == "" || seenStruct == matches[1] {
seenStruct = matches[1]
} else {
return 0, fmt.Errorf("classifier: already annotated as %s, can't also be %q", seenStruct, matches[1])
}
case "meta":
n |= metaNode
case "parameters":
n |= parametersNode
if seenStruct == "" || seenStruct == matches[1] {
seenStruct = matches[1]
} else {
return 0, fmt.Errorf("classifier: already annotated as %s, can't also be %q", seenStruct, matches[1])
}
case "response":
n |= responseNode
if seenStruct == "" || seenStruct == matches[1] {
seenStruct = matches[1]
} else {
return 0, fmt.Errorf("classifier: already annotated as %s, can't also be %q", seenStruct, matches[1])
}
case "strfmt", "name", "discriminated", "file", "enum", "default", "alias", "type":
// TODO: perhaps collect these and pass along to avoid lookups later on
case "allOf":
case "ignore":
default:
return 0, fmt.Errorf("classifier: unknown swagger annotation %q", matches[1])
}
}
}
return n, nil
}
func debugLog(format string, args ...interface{}) {
if Debug {
log.Printf(format, args...)
}
}


@ -0,0 +1,248 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package codescan
import (
"encoding/json"
"fmt"
"go/ast"
"net/mail"
"regexp"
"strings"
"github.com/go-openapi/spec"
)
type metaSection struct {
Comments *ast.CommentGroup
}
func metaTOSSetter(meta *spec.Info) func([]string) {
return func(lines []string) {
meta.TermsOfService = joinDropLast(lines)
}
}
func metaConsumesSetter(meta *spec.Swagger) func([]string) {
return func(consumes []string) { meta.Consumes = consumes }
}
func metaProducesSetter(meta *spec.Swagger) func([]string) {
return func(produces []string) { meta.Produces = produces }
}
func metaSchemeSetter(meta *spec.Swagger) func([]string) {
return func(schemes []string) { meta.Schemes = schemes }
}
func metaSecuritySetter(meta *spec.Swagger) func([]map[string][]string) {
return func(secDefs []map[string][]string) { meta.Security = secDefs }
}
func metaSecurityDefinitionsSetter(meta *spec.Swagger) func(json.RawMessage) error {
return func(jsonValue json.RawMessage) error {
var jsonData spec.SecurityDefinitions
err := json.Unmarshal(jsonValue, &jsonData)
if err != nil {
return err
}
meta.SecurityDefinitions = jsonData
return nil
}
}
func metaVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
return func(jsonValue json.RawMessage) error {
var jsonData spec.Extensions
err := json.Unmarshal(jsonValue, &jsonData)
if err != nil {
return err
}
for k := range jsonData {
if !rxAllowedExtensions.MatchString(k) {
return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
}
}
meta.Extensions = jsonData
return nil
}
}
func infoVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
return func(jsonValue json.RawMessage) error {
var jsonData spec.Extensions
err := json.Unmarshal(jsonValue, &jsonData)
if err != nil {
return err
}
for k := range jsonData {
if !rxAllowedExtensions.MatchString(k) {
return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
}
}
meta.Info.Extensions = jsonData
return nil
}
}
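// newMetaParser builds the sectioned parser that reads the swagger:meta doc
// comment and fills in the top-level spec and info sections.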
func newMetaParser(swspec *spec.Swagger) *sectionedParser {
sp := new(sectionedParser)
if swspec.Info == nil {
swspec.Info = new(spec.Info)
}
info := swspec.Info
sp.setTitle = func(lines []string) {
tosave := joinDropLast(lines)
if len(tosave) > 0 {
tosave = rxStripTitleComments.ReplaceAllString(tosave, "")
}
info.Title = tosave
}
sp.setDescription = func(lines []string) { info.Description = joinDropLast(lines) }
sp.taggers = []tagParser{
newMultiLineTagParser("TOS", newMultilineDropEmptyParser(rxTOS, metaTOSSetter(info)), false),
newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, metaConsumesSetter(swspec)), false),
newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, metaProducesSetter(swspec)), false),
newSingleLineTagParser("Schemes", newSetSchemes(metaSchemeSetter(swspec))),
newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, metaSecuritySetter(swspec)), false),
newMultiLineTagParser("SecurityDefinitions", newYamlParser(rxSecurity, metaSecurityDefinitionsSetter(swspec)), true),
newSingleLineTagParser("Version", &setMetaSingle{swspec, rxVersion, setInfoVersion}),
newSingleLineTagParser("Host", &setMetaSingle{swspec, rxHost, setSwaggerHost}),
newSingleLineTagParser("BasePath", &setMetaSingle{swspec, rxBasePath, setSwaggerBasePath}),
newSingleLineTagParser("Contact", &setMetaSingle{swspec, rxContact, setInfoContact}),
newSingleLineTagParser("License", &setMetaSingle{swspec, rxLicense, setInfoLicense}),
newMultiLineTagParser("YAMLInfoExtensionsBlock", newYamlParser(rxInfoExtensions, infoVendorExtensibleSetter(swspec)), true),
newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, metaVendorExtensibleSetter(swspec)), true),
}
return sp
}
type setMetaSingle struct {
spec *spec.Swagger
rx *regexp.Regexp
set func(spec *spec.Swagger, lines []string) error
}
func (s *setMetaSingle) Matches(line string) bool {
return s.rx.MatchString(line)
}
func (s *setMetaSingle) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := s.rx.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
return s.set(s.spec, []string{matches[1]})
}
return nil
}
func setSwaggerHost(swspec *spec.Swagger, lines []string) error {
lns := lines
if len(lns) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
lns = []string{"localhost"}
}
swspec.Host = lns[0]
return nil
}
func setSwaggerBasePath(swspec *spec.Swagger, lines []string) error {
var ln string
if len(lines) > 0 {
ln = lines[0]
}
swspec.BasePath = ln
return nil
}
func setInfoVersion(swspec *spec.Swagger, lines []string) error {
if len(lines) == 0 {
return nil
}
info := safeInfo(swspec)
info.Version = strings.TrimSpace(lines[0])
return nil
}
func setInfoContact(swspec *spec.Swagger, lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
contact, err := parseContactInfo(lines[0])
if err != nil {
return err
}
info := safeInfo(swspec)
info.Contact = contact
return nil
}
func parseContactInfo(line string) (*spec.ContactInfo, error) {
nameEmail, url := splitURL(line)
var name, email string
if len(nameEmail) > 0 {
addr, err := mail.ParseAddress(nameEmail)
if err != nil {
return nil, err
}
name, email = addr.Name, addr.Address
}
return &spec.ContactInfo{
URL: url,
Name: name,
Email: email,
}, nil
}
func setInfoLicense(swspec *spec.Swagger, lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
info := safeInfo(swspec)
line := lines[0]
name, url := splitURL(line)
info.License = &spec.License{
Name: name,
URL: url,
}
return nil
}
func safeInfo(swspec *spec.Swagger) *spec.Info {
if swspec.Info == nil {
swspec.Info = new(spec.Info)
}
return swspec.Info
}
// httpFTPScheme matches http://, https://, ftp://, ftps://, ws:// and wss:// prefixes
var httpFTPScheme = regexp.MustCompile("(?:(?:ht|f)tp|ws)s?://")
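// splitURL separates a leading free-form part (such as a name) from a trailing URL on the same line.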
func splitURL(line string) (notURL, url string) {
str := strings.TrimSpace(line)
parts := httpFTPScheme.FindStringIndex(str)
if len(parts) == 0 {
if len(str) > 0 {
notURL = str
}
return
}
if len(parts) > 0 {
notURL = strings.TrimSpace(str[:parts[0]])
url = strings.TrimSpace(str[parts[0]:])
}
return
}


@ -0,0 +1,170 @@
package codescan
import (
"fmt"
"go/ast"
"regexp"
"strings"
"github.com/go-openapi/spec"
)
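// operationsBuilder merges the content parsed from a swagger:operation comment
// into the paths object of the target spec.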
type operationsBuilder struct {
ctx *scanCtx
path parsedPathContent
operations map[string]*spec.Operation
}
func (o *operationsBuilder) Build(tgt *spec.Paths) error {
pthObj := tgt.Paths[o.path.Path]
op := setPathOperation(
o.path.Method, o.path.ID,
&pthObj, o.operations[o.path.ID])
op.Tags = o.path.Tags
sp := new(yamlSpecScanner)
sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
if err := sp.Parse(o.path.Remaining); err != nil {
return fmt.Errorf("operation (%s): %v", op.ID, err)
}
if err := sp.UnmarshalSpec(op.UnmarshalJSON); err != nil {
return fmt.Errorf("operation (%s): %v", op.ID, err)
}
if tgt.Paths == nil {
tgt.Paths = make(map[string]spec.PathItem)
}
tgt.Paths[o.path.Path] = pthObj
return nil
}
type parsedPathContent struct {
Method, Path, ID string
Tags []string
Remaining *ast.CommentGroup
}
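// parsePathAnnotation extracts the method, path, tags and operation id from a
// swagger:route or swagger:operation comment group; the comment lines that
// follow the annotation are collected in Remaining for later parsing.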
func parsePathAnnotation(annotation *regexp.Regexp, lines []*ast.Comment) (cnt parsedPathContent) {
var justMatched bool
for _, cmt := range lines {
txt := cmt.Text
for _, line := range strings.Split(txt, "\n") {
matches := annotation.FindStringSubmatch(line)
if len(matches) > 3 {
cnt.Method, cnt.Path, cnt.ID = matches[1], matches[2], matches[len(matches)-1]
cnt.Tags = rxSpace.Split(matches[3], -1)
if len(matches[3]) == 0 {
cnt.Tags = nil
}
justMatched = true
} else if cnt.Method != "" {
if cnt.Remaining == nil {
cnt.Remaining = new(ast.CommentGroup)
}
if !justMatched || strings.TrimSpace(rxStripComments.ReplaceAllString(line, "")) != "" {
cc := new(ast.Comment)
cc.Slash = cmt.Slash
cc.Text = line
cnt.Remaining.List = append(cnt.Remaining.List, cc)
justMatched = false
}
}
}
}
return
}
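// setPathOperation attaches the operation to the slot for the given method on
// the path item, reusing an already registered operation when its id matches.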
func setPathOperation(method, id string, pthObj *spec.PathItem, op *spec.Operation) *spec.Operation {
if op == nil {
op = new(spec.Operation)
op.ID = id
}
switch strings.ToUpper(method) {
case "GET":
if pthObj.Get != nil {
if id == pthObj.Get.ID {
op = pthObj.Get
} else {
pthObj.Get = op
}
} else {
pthObj.Get = op
}
case "POST":
if pthObj.Post != nil {
if id == pthObj.Post.ID {
op = pthObj.Post
} else {
pthObj.Post = op
}
} else {
pthObj.Post = op
}
case "PUT":
if pthObj.Put != nil {
if id == pthObj.Put.ID {
op = pthObj.Put
} else {
pthObj.Put = op
}
} else {
pthObj.Put = op
}
case "PATCH":
if pthObj.Patch != nil {
if id == pthObj.Patch.ID {
op = pthObj.Patch
} else {
pthObj.Patch = op
}
} else {
pthObj.Patch = op
}
case "HEAD":
if pthObj.Head != nil {
if id == pthObj.Head.ID {
op = pthObj.Head
} else {
pthObj.Head = op
}
} else {
pthObj.Head = op
}
case "DELETE":
if pthObj.Delete != nil {
if id == pthObj.Delete.ID {
op = pthObj.Delete
} else {
pthObj.Delete = op
}
} else {
pthObj.Delete = op
}
case "OPTIONS":
if pthObj.Options != nil {
if id == pthObj.Options.ID {
op = pthObj.Options
} else {
pthObj.Options = op
}
} else {
pthObj.Options = op
}
}
return op
}


@ -0,0 +1,482 @@
package codescan
import (
"fmt"
"go/ast"
"go/types"
"strings"
"golang.org/x/tools/go/ast/astutil"
"github.com/pkg/errors"
"github.com/go-openapi/spec"
)
type paramTypable struct {
param *spec.Parameter
}
func (pt paramTypable) Level() int { return 0 }
func (pt paramTypable) Typed(tpe, format string) {
pt.param.Typed(tpe, format)
}
func (pt paramTypable) SetRef(ref spec.Ref) {
pt.param.Ref = ref
}
func (pt paramTypable) Items() swaggerTypable {
bdt, schema := bodyTypable(pt.param.In, pt.param.Schema)
if bdt != nil {
pt.param.Schema = schema
return bdt
}
if pt.param.Items == nil {
pt.param.Items = new(spec.Items)
}
pt.param.Type = "array"
return itemsTypable{pt.param.Items, 1}
}
func (pt paramTypable) Schema() *spec.Schema {
if pt.param.In != "body" {
return nil
}
if pt.param.Schema == nil {
pt.param.Schema = new(spec.Schema)
}
return pt.param.Schema
}
func (pt paramTypable) AddExtension(key string, value interface{}) {
if pt.param.In == "body" {
pt.Schema().AddExtension(key, value)
} else {
pt.param.AddExtension(key, value)
}
}
type itemsTypable struct {
items *spec.Items
level int
}
func (pt itemsTypable) Level() int { return pt.level }
func (pt itemsTypable) Typed(tpe, format string) {
pt.items.Typed(tpe, format)
}
func (pt itemsTypable) SetRef(ref spec.Ref) {
pt.items.Ref = ref
}
func (pt itemsTypable) Schema() *spec.Schema {
return nil
}
func (pt itemsTypable) Items() swaggerTypable {
if pt.items.Items == nil {
pt.items.Items = new(spec.Items)
}
pt.items.Type = "array"
return itemsTypable{pt.items.Items, pt.level + 1}
}
func (pt itemsTypable) AddExtension(key string, value interface{}) {
pt.items.AddExtension(key, value)
}
type paramValidations struct {
current *spec.Parameter
}
func (sv paramValidations) SetMaximum(val float64, exclusive bool) {
sv.current.Maximum = &val
sv.current.ExclusiveMaximum = exclusive
}
func (sv paramValidations) SetMinimum(val float64, exclusive bool) {
sv.current.Minimum = &val
sv.current.ExclusiveMinimum = exclusive
}
func (sv paramValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
func (sv paramValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
func (sv paramValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
func (sv paramValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
func (sv paramValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
func (sv paramValidations) SetPattern(val string) { sv.current.Pattern = val }
func (sv paramValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
func (sv paramValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
func (sv paramValidations) SetEnum(val string) {
sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
}
func (sv paramValidations) SetDefault(val interface{}) { sv.current.Default = val }
func (sv paramValidations) SetExample(val interface{}) { sv.current.Example = val }
type itemsValidations struct {
current *spec.Items
}
func (sv itemsValidations) SetMaximum(val float64, exclusive bool) {
sv.current.Maximum = &val
sv.current.ExclusiveMaximum = exclusive
}
func (sv itemsValidations) SetMinimum(val float64, exclusive bool) {
sv.current.Minimum = &val
sv.current.ExclusiveMinimum = exclusive
}
func (sv itemsValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
func (sv itemsValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
func (sv itemsValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
func (sv itemsValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
func (sv itemsValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
func (sv itemsValidations) SetPattern(val string) { sv.current.Pattern = val }
func (sv itemsValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
func (sv itemsValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
func (sv itemsValidations) SetEnum(val string) {
sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
}
func (sv itemsValidations) SetDefault(val interface{}) { sv.current.Default = val }
func (sv itemsValidations) SetExample(val interface{}) { sv.current.Example = val }
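// parameterBuilder collects operation parameters from a struct annotated with swagger:parameters.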
type parameterBuilder struct {
ctx *scanCtx
decl *entityDecl
postDecls []*entityDecl
}
func (p *parameterBuilder) Build(operations map[string]*spec.Operation) error {
// check if there is a swagger:parameters tag that is followed by one or more words;
// these words are the ids of the operations this parameter struct applies to.
// once the type name is found it is converted to a schema by looking it up in the
// parameters dictionary that got passed into this parse method
for _, opid := range p.decl.OperationIDS() {
operation, ok := operations[opid]
if !ok {
operation = new(spec.Operation)
operations[opid] = operation
operation.ID = opid
}
debugLog("building parameters for: %s", opid)
// analyze struct body for fields etc
// each exported struct field:
// * gets a type mapped to a go primitive
// * perhaps gets a format
// * has to document the validations that apply for the type and the field
// * when the struct field points to a model it becomes a ref: #/definitions/ModelName
// * comments that aren't tags are used as the description
if err := p.buildFromType(p.decl.Type, operation, make(map[string]spec.Parameter)); err != nil {
return err
}
}
return nil
}
func (p *parameterBuilder) buildFromType(otpe types.Type, op *spec.Operation, seen map[string]spec.Parameter) error {
switch tpe := otpe.(type) {
case *types.Pointer:
return p.buildFromType(tpe.Elem(), op, seen)
case *types.Named:
o := tpe.Obj()
switch stpe := o.Type().Underlying().(type) {
case *types.Struct:
debugLog("build from type %s: %T", tpe.Obj().Name(), otpe)
if decl, found := p.ctx.DeclForType(o.Type()); found {
return p.buildFromStruct(decl, stpe, op, seen)
}
return p.buildFromStruct(p.decl, stpe, op, seen)
default:
return errors.Errorf("unhandled type (%T): %s", stpe, o.Type().Underlying().String())
}
default:
return errors.Errorf("unhandled type (%T): %s", otpe, tpe.String())
}
}
func (p *parameterBuilder) buildFromField(fld *types.Var, tpe types.Type, typable swaggerTypable, seen map[string]spec.Parameter) error {
debugLog("build from field %s: %T", fld.Name(), tpe)
switch ftpe := tpe.(type) {
case *types.Basic:
return swaggerSchemaForType(ftpe.Name(), typable)
case *types.Struct:
sb := schemaBuilder{
decl: p.decl,
ctx: p.ctx,
}
if err := sb.buildFromType(tpe, typable); err != nil {
return err
}
p.postDecls = append(p.postDecls, sb.postDecls...)
return nil
case *types.Pointer:
return p.buildFromField(fld, ftpe.Elem(), typable, seen)
case *types.Interface:
sb := schemaBuilder{
decl: p.decl,
ctx: p.ctx,
}
if err := sb.buildFromType(tpe, typable); err != nil {
return err
}
p.postDecls = append(p.postDecls, sb.postDecls...)
return nil
case *types.Array:
return p.buildFromField(fld, ftpe.Elem(), typable.Items(), seen)
case *types.Slice:
return p.buildFromField(fld, ftpe.Elem(), typable.Items(), seen)
case *types.Map:
schema := new(spec.Schema)
typable.Schema().Typed("object", "").AdditionalProperties = &spec.SchemaOrBool{
Schema: schema,
}
sb := schemaBuilder{
decl: p.decl,
ctx: p.ctx,
}
if err := sb.buildFromType(ftpe.Elem(), schemaTypable{schema, typable.Level() + 1}); err != nil {
return err
}
return nil
case *types.Named:
if decl, found := p.ctx.DeclForType(ftpe.Obj().Type()); found {
if decl.Type.Obj().Pkg().Path() == "time" && decl.Type.Obj().Name() == "Time" {
typable.Typed("string", "date-time")
return nil
}
if sfnm, isf := strfmtName(decl.Comments); isf {
typable.Typed("string", sfnm)
return nil
}
//if err := r.makeRef(decl, typable); err != nil {
// return err
//}
sb := &schemaBuilder{ctx: p.ctx, decl: decl}
sb.inferNames()
if err := sb.buildFromType(decl.Type, typable); err != nil {
return err
}
p.postDecls = append(p.postDecls, sb.postDecls...)
return nil
}
return errors.Errorf("unable to find package and source file for: %s", ftpe.String())
default:
return errors.Errorf("unknown type for %s: %T", fld.String(), fld.Type())
}
}
func (p *parameterBuilder) buildFromStruct(decl *entityDecl, tpe *types.Struct, op *spec.Operation, seen map[string]spec.Parameter) error {
if tpe.NumFields() == 0 {
return nil
}
var sequence []string
for i := 0; i < tpe.NumFields(); i++ {
fld := tpe.Field(i)
if fld.Embedded() {
if err := p.buildFromType(fld.Type(), op, seen); err != nil {
return err
}
continue
}
tg := tpe.Tag(i)
var afld *ast.Field
ans, _ := astutil.PathEnclosingInterval(decl.File, fld.Pos(), fld.Pos())
for _, an := range ans {
at, valid := an.(*ast.Field)
if !valid {
continue
}
debugLog("field %s: %s(%T) [%q] ==> %s", fld.Name(), fld.Type().String(), fld.Type(), tg, at.Doc.Text())
afld = at
break
}
if afld == nil {
debugLog("can't find source associated with %s for %s", fld.String(), tpe.String())
continue
}
// if the field is annotated with swagger:ignore, ignore it
if ignored(afld.Doc) {
continue
}
name, ignore, _, err := parseJSONTag(afld)
if err != nil {
return err
}
if ignore {
continue
}
in := "query"
// scan for param location first; this changes some behavior down the line
if afld.Doc != nil {
for _, cmt := range afld.Doc.List {
for _, line := range strings.Split(cmt.Text, "\n") {
matches := rxIn.FindStringSubmatch(line)
if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
in = strings.TrimSpace(matches[1])
}
}
}
}
ps := seen[name]
ps.In = in
var pty swaggerTypable = paramTypable{&ps}
if in == "body" {
pty = schemaTypable{pty.Schema(), 0}
}
if in == "formData" && afld.Doc != nil && fileParam(afld.Doc) {
pty.Typed("file", "")
} else if err := p.buildFromField(fld, fld.Type(), pty, seen); err != nil {
return err
}
if strfmtName, ok := strfmtName(afld.Doc); ok {
ps.Typed("string", strfmtName)
ps.Ref = spec.Ref{}
ps.Items = nil
}
sp := new(sectionedParser)
sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
if ps.Ref.String() == "" {
sp.taggers = []tagParser{
newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
newSingleLineTagParser("maximum", &setMaximum{paramValidations{&ps}, rxf(rxMaximumFmt, "")}),
newSingleLineTagParser("minimum", &setMinimum{paramValidations{&ps}, rxf(rxMinimumFmt, "")}),
newSingleLineTagParser("multipleOf", &setMultipleOf{paramValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
newSingleLineTagParser("minLength", &setMinLength{paramValidations{&ps}, rxf(rxMinLengthFmt, "")}),
newSingleLineTagParser("maxLength", &setMaxLength{paramValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
newSingleLineTagParser("pattern", &setPattern{paramValidations{&ps}, rxf(rxPatternFmt, "")}),
newSingleLineTagParser("collectionFormat", &setCollectionFormat{paramValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
newSingleLineTagParser("minItems", &setMinItems{paramValidations{&ps}, rxf(rxMinItemsFmt, "")}),
newSingleLineTagParser("maxItems", &setMaxItems{paramValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
newSingleLineTagParser("unique", &setUnique{paramValidations{&ps}, rxf(rxUniqueFmt, "")}),
newSingleLineTagParser("enum", &setEnum{paramValidations{&ps}, rxf(rxEnumFmt, "")}),
newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxDefaultFmt, "")}),
newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxExampleFmt, "")}),
newSingleLineTagParser("required", &setRequiredParam{&ps}),
}
itemsTaggers := func(items *spec.Items, level int) []tagParser {
// the expression is 1-indexed, not 0-indexed
itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
return []tagParser{
newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
}
}
var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
if items == nil {
return []tagParser{}, nil
}
switch iftpe := expr.(type) {
case *ast.ArrayType:
eleTaggers := itemsTaggers(items, level)
sp.taggers = append(eleTaggers, sp.taggers...)
otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
if err != nil {
return nil, err
}
return otherTaggers, nil
case *ast.SelectorExpr:
otherTaggers, err := parseArrayTypes(iftpe.Sel, items.Items, level+1)
if err != nil {
return nil, err
}
return otherTaggers, nil
case *ast.Ident:
taggers := []tagParser{}
if iftpe.Obj == nil {
taggers = itemsTaggers(items, level)
}
otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
if err != nil {
return nil, err
}
return append(taggers, otherTaggers...), nil
case *ast.StarExpr:
otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
if err != nil {
return nil, err
}
return otherTaggers, nil
default:
return nil, fmt.Errorf("unknown field type ele for %q", name)
}
}
// check if this is a primitive; if so, parse the validations from the
// doc comments of the slice declaration.
if ftped, ok := afld.Type.(*ast.ArrayType); ok {
taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
if err != nil {
return err
}
sp.taggers = append(taggers, sp.taggers...)
}
} else {
sp.taggers = []tagParser{
newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
newSingleLineTagParser("required", &matchOnlyParam{&ps, rxRequired}),
}
}
if err := sp.Parse(afld.Doc); err != nil {
return err
}
if ps.In == "path" {
ps.Required = true
}
if ps.Name == "" {
ps.Name = name
}
if name != fld.Name() {
addExtension(&ps.VendorExtensible, "x-go-name", fld.Name())
}
seen[name] = ps
sequence = append(sequence, name)
}
for _, k := range sequence {
p := seen[k]
for i, v := range op.Parameters {
if v.Name == k {
op.Parameters = append(op.Parameters[:i], op.Parameters[i+1:]...)
break
}
}
op.Parameters = append(op.Parameters, p)
}
return nil
}

File diff suppressed because it is too large


@ -0,0 +1,94 @@
package codescan
import "regexp"
const (
rxMethod = "(\\p{L}+)"
rxPath = "((?:/[\\p{L}\\p{N}\\p{Pd}\\p{Pc}{}\\-\\.\\?_~%!$&'()*+,;=:@/]*)+/?)"
rxOpTags = "(\\p{L}[\\p{L}\\p{N}\\p{Pd}\\.\\p{Pc}\\p{Zs}]+)"
rxOpID = "((?:\\p{L}[\\p{L}\\p{N}\\p{Pd}\\p{Pc}]+)+)"
rxMaximumFmt = "%s[Mm]ax(?:imum)?\\p{Zs}*:\\p{Zs}*([\\<=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
rxMinimumFmt = "%s[Mm]in(?:imum)?\\p{Zs}*:\\p{Zs}*([\\>=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
rxMultipleOfFmt = "%s[Mm]ultiple\\p{Zs}*[Oo]f\\p{Zs}*:\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
rxMaxLengthFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
rxMinLengthFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
rxPatternFmt = "%s[Pp]attern\\p{Zs}*:\\p{Zs}*(.*)$"
rxCollectionFormatFmt = "%s[Cc]ollection(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ff]ormat)\\p{Zs}*:\\p{Zs}*(.*)$"
rxEnumFmt = "%s[Ee]num\\p{Zs}*:\\p{Zs}*(.*)$"
rxDefaultFmt = "%s[Dd]efault\\p{Zs}*:\\p{Zs}*(.*)$"
rxExampleFmt = "%s[Ee]xample\\p{Zs}*:\\p{Zs}*(.*)$"
rxMaxItemsFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
rxMinItemsFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
rxUniqueFmt = "%s[Uu]nique\\p{Zs}*:\\p{Zs}*(true|false)$"
rxItemsPrefixFmt = "(?:[Ii]tems[\\.\\p{Zs}]*){%d}"
)
var (
rxSwaggerAnnotation = regexp.MustCompile(`swagger:([\p{L}\p{N}\p{Pd}\p{Pc}]+)`)
rxFileUpload = regexp.MustCompile(`swagger:file`)
rxStrFmt = regexp.MustCompile(`swagger:strfmt\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
rxAlias = regexp.MustCompile(`swagger:alias`)
rxName = regexp.MustCompile(`swagger:name\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)$`)
rxAllOf = regexp.MustCompile(`swagger:allOf\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)?$`)
rxModelOverride = regexp.MustCompile(`swagger:model\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
rxResponseOverride = regexp.MustCompile(`swagger:response\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
rxParametersOverride = regexp.MustCompile(`swagger:parameters\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\p{Zs}]+)$`)
rxEnum = regexp.MustCompile(`swagger:enum\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
rxIgnoreOverride = regexp.MustCompile(`swagger:ignore\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
rxDefault = regexp.MustCompile(`swagger:default\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
rxType = regexp.MustCompile(`swagger:type\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
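// rxRoute matches lines such as: swagger:route GET /pets/{id} pets getPetByID
// (tags are optional, the operation id is required)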
rxRoute = regexp.MustCompile(
"swagger:route\\p{Zs}*" +
rxMethod +
"\\p{Zs}*" +
rxPath +
"(?:\\p{Zs}+" +
rxOpTags +
")?\\p{Zs}+" +
rxOpID + "\\p{Zs}*$")
rxBeginYAMLSpec = regexp.MustCompile(`---\p{Zs}*$`)
rxUncommentHeaders = regexp.MustCompile(`^[\p{Zs}\t/\*-]*\|?`)
rxUncommentYAML = regexp.MustCompile(`^[\p{Zs}\t]*/*`)
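// rxOperation matches lines such as: swagger:operation POST /pets pets createPet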
rxOperation = regexp.MustCompile(
"swagger:operation\\p{Zs}*" +
rxMethod +
"\\p{Zs}*" +
rxPath +
"(?:\\p{Zs}+" +
rxOpTags +
")?\\p{Zs}+" +
rxOpID + "\\p{Zs}*$")
rxSpace = regexp.MustCompile(`\p{Zs}+`)
rxIndent = regexp.MustCompile(`\p{Zs}*/*\p{Zs}*[^\p{Zs}]`)
rxPunctuationEnd = regexp.MustCompile(`\p{Po}$`)
rxStripComments = regexp.MustCompile(`^[^\p{L}\p{N}\p{Pd}\p{Pc}\+]*`)
rxStripTitleComments = regexp.MustCompile(`^[^\p{L}]*[Pp]ackage\p{Zs}+[^\p{Zs}]+\p{Zs}*`)
rxAllowedExtensions = regexp.MustCompile(`^[Xx]-`)
rxIn = regexp.MustCompile(`[Ii]n\p{Zs}*:\p{Zs}*(query|path|header|body|formData)$`)
rxRequired = regexp.MustCompile(`[Rr]equired\p{Zs}*:\p{Zs}*(true|false)$`)
rxDiscriminator = regexp.MustCompile(`[Dd]iscriminator\p{Zs}*:\p{Zs}*(true|false)$`)
rxReadOnly = regexp.MustCompile(`[Rr]ead(?:\p{Zs}*|[\p{Pd}\p{Pc}])?[Oo]nly\p{Zs}*:\p{Zs}*(true|false)$`)
rxConsumes = regexp.MustCompile(`[Cc]onsumes\p{Zs}*:`)
rxProduces = regexp.MustCompile(`[Pp]roduces\p{Zs}*:`)
rxSecuritySchemes = regexp.MustCompile(`[Ss]ecurity\p{Zs}*:`)
rxSecurity = regexp.MustCompile(`[Ss]ecurity\p{Zs}*[Dd]efinitions:`)
rxResponses = regexp.MustCompile(`[Rr]esponses\p{Zs}*:`)
rxParameters = regexp.MustCompile(`[Pp]arameters\p{Zs}*:`)
rxSchemes = regexp.MustCompile(`[Ss]chemes\p{Zs}*:\p{Zs}*((?:(?:https?|HTTPS?|wss?|WSS?)[\p{Zs},]*)+)$`)
rxVersion = regexp.MustCompile(`[Vv]ersion\p{Zs}*:\p{Zs}*(.+)$`)
rxHost = regexp.MustCompile(`[Hh]ost\p{Zs}*:\p{Zs}*(.+)$`)
rxBasePath = regexp.MustCompile(`[Bb]ase\p{Zs}*-*[Pp]ath\p{Zs}*:\p{Zs}*` + rxPath + "$")
rxLicense = regexp.MustCompile(`[Ll]icense\p{Zs}*:\p{Zs}*(.+)$`)
rxContact = regexp.MustCompile(`[Cc]ontact\p{Zs}*-?(?:[Ii]nfo\p{Zs}*)?:\p{Zs}*(.+)$`)
rxTOS = regexp.MustCompile(`[Tt](?:erms)?\p{Zs}*-?[Oo]f?\p{Zs}*-?[Ss](?:ervice)?\p{Zs}*:`)
rxExtensions = regexp.MustCompile(`[Ee]xtensions\p{Zs}*:`)
rxInfoExtensions = regexp.MustCompile(`[Ii]nfo\p{Zs}*[Ee]xtensions:`)
rxDeprecated = regexp.MustCompile(`[Dd]eprecated\p{Zs}*:\p{Zs}*(true|false)$`)
// currently unused: rxExample = regexp.MustCompile(`[Ex]ample\p{Zs}*:\p{Zs}*(.*)$`)
)


@ -0,0 +1,437 @@
package codescan
import (
"fmt"
"go/ast"
"go/types"
"strings"
"github.com/pkg/errors"
"golang.org/x/tools/go/ast/astutil"
"github.com/go-openapi/spec"
)
type responseTypable struct {
in string
header *spec.Header
response *spec.Response
}
func (ht responseTypable) Level() int { return 0 }
func (ht responseTypable) Typed(tpe, format string) {
ht.header.Typed(tpe, format)
}
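// bodyTypable returns a schema-backed typable when the value lives in the body;
// the schema is typed as an array and its items receive the type information.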
func bodyTypable(in string, schema *spec.Schema) (swaggerTypable, *spec.Schema) {
if in == "body" {
// get the schema for items on the schema property
if schema == nil {
schema = new(spec.Schema)
}
if schema.Items == nil {
schema.Items = new(spec.SchemaOrArray)
}
if schema.Items.Schema == nil {
schema.Items.Schema = new(spec.Schema)
}
schema.Typed("array", "")
return schemaTypable{schema.Items.Schema, 1}, schema
}
return nil, nil
}
func (ht responseTypable) Items() swaggerTypable {
bdt, schema := bodyTypable(ht.in, ht.response.Schema)
if bdt != nil {
ht.response.Schema = schema
return bdt
}
if ht.header.Items == nil {
ht.header.Items = new(spec.Items)
}
ht.header.Type = "array"
return itemsTypable{ht.header.Items, 1}
}
func (ht responseTypable) SetRef(ref spec.Ref) {
// having trouble seeing the usefulness of this one here
ht.Schema().Ref = ref
}
func (ht responseTypable) Schema() *spec.Schema {
if ht.response.Schema == nil {
ht.response.Schema = new(spec.Schema)
}
return ht.response.Schema
}
func (ht responseTypable) SetSchema(schema *spec.Schema) {
ht.response.Schema = schema
}
func (ht responseTypable) CollectionOf(items *spec.Items, format string) {
ht.header.CollectionOf(items, format)
}
func (ht responseTypable) AddExtension(key string, value interface{}) {
ht.response.AddExtension(key, value)
}
type headerValidations struct {
current *spec.Header
}
func (sv headerValidations) SetMaximum(val float64, exclusive bool) {
sv.current.Maximum = &val
sv.current.ExclusiveMaximum = exclusive
}
func (sv headerValidations) SetMinimum(val float64, exclusive bool) {
sv.current.Minimum = &val
sv.current.ExclusiveMinimum = exclusive
}
func (sv headerValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
func (sv headerValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
func (sv headerValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
func (sv headerValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
func (sv headerValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
func (sv headerValidations) SetPattern(val string) { sv.current.Pattern = val }
func (sv headerValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
func (sv headerValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
func (sv headerValidations) SetEnum(val string) {
sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
}
func (sv headerValidations) SetDefault(val interface{}) { sv.current.Default = val }
func (sv headerValidations) SetExample(val interface{}) { sv.current.Example = val }
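// responseBuilder turns a struct annotated with swagger:response into a named
// response with its headers and body schema.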
type responseBuilder struct {
ctx *scanCtx
decl *entityDecl
postDecls []*entityDecl
}
func (r *responseBuilder) Build(responses map[string]spec.Response) error {
// check if there is a swagger:response tag; when it is followed by a word,
// that word is used as the name under which this response is registered.
// the struct's doc comment and fields are then scanned to fill in the
// response description, headers and body schema
name, _ := r.decl.ResponseNames()
response := responses[name]
debugLog("building response: %s", name)
// analyze doc comment for the model
sp := new(sectionedParser)
sp.setDescription = func(lines []string) { response.Description = joinDropLast(lines) }
if err := sp.Parse(r.decl.Comments); err != nil {
return err
}
// analyze struct body for fields etc
// each exported struct field:
// * gets a type mapped to a go primitive
// * perhaps gets a format
// * has to document the validations that apply for the type and the field
// * when the struct field points to a model it becomes a ref: #/definitions/ModelName
// * comments that aren't tags are used as the description
if err := r.buildFromType(r.decl.Type, &response, make(map[string]bool)); err != nil {
return err
}
responses[name] = response
return nil
}
func (r *responseBuilder) buildFromField(fld *types.Var, tpe types.Type, typable swaggerTypable, seen map[string]bool) error {
debugLog("build from field %s: %T", fld.Name(), tpe)
switch ftpe := tpe.(type) {
case *types.Basic:
return swaggerSchemaForType(ftpe.Name(), typable)
case *types.Struct:
sb := schemaBuilder{
decl: r.decl,
ctx: r.ctx,
}
if err := sb.buildFromType(tpe, typable); err != nil {
return err
}
r.postDecls = append(r.postDecls, sb.postDecls...)
return nil
case *types.Pointer:
return r.buildFromField(fld, ftpe.Elem(), typable, seen)
case *types.Interface:
sb := schemaBuilder{
decl: r.decl,
ctx: r.ctx,
}
if err := sb.buildFromType(tpe, typable); err != nil {
return err
}
r.postDecls = append(r.postDecls, sb.postDecls...)
return nil
case *types.Array:
return r.buildFromField(fld, ftpe.Elem(), typable.Items(), seen)
case *types.Slice:
return r.buildFromField(fld, ftpe.Elem(), typable.Items(), seen)
case *types.Map:
schema := new(spec.Schema)
typable.Schema().Typed("object", "").AdditionalProperties = &spec.SchemaOrBool{
Schema: schema,
}
sb := schemaBuilder{
decl: r.decl,
ctx: r.ctx,
}
if err := sb.buildFromType(ftpe.Elem(), schemaTypable{schema, typable.Level() + 1}); err != nil {
return err
}
return nil
case *types.Named:
if decl, found := r.ctx.DeclForType(ftpe.Obj().Type()); found {
if decl.Type.Obj().Pkg().Path() == "time" && decl.Type.Obj().Name() == "Time" {
typable.Typed("string", "date-time")
return nil
}
if sfnm, isf := strfmtName(decl.Comments); isf {
typable.Typed("string", sfnm)
return nil
}
sb := &schemaBuilder{ctx: r.ctx, decl: decl}
sb.inferNames()
if err := sb.buildFromType(decl.Type, typable); err != nil {
return err
}
r.postDecls = append(r.postDecls, sb.postDecls...)
return nil
}
return errors.Errorf("unable to find package and source file for: %s", ftpe.String())
default:
return errors.Errorf("unknown type for %s: %T", fld.String(), fld.Type())
}
}
func (r *responseBuilder) buildFromType(otpe types.Type, resp *spec.Response, seen map[string]bool) error {
switch tpe := otpe.(type) {
case *types.Pointer:
return r.buildFromType(tpe.Elem(), resp, seen)
case *types.Named:
o := tpe.Obj()
switch stpe := o.Type().Underlying().(type) {
case *types.Struct:
debugLog("build from type %s: %T", tpe.Obj().Name(), otpe)
if decl, found := r.ctx.DeclForType(o.Type()); found {
return r.buildFromStruct(decl, stpe, resp, seen)
}
return r.buildFromStruct(r.decl, stpe, resp, seen)
default:
if decl, found := r.ctx.DeclForType(o.Type()); found {
var schema spec.Schema
typable := schemaTypable{schema: &schema, level: 0}
if decl.Type.Obj().Pkg().Path() == "time" && decl.Type.Obj().Name() == "Time" {
typable.Typed("string", "date-time")
return nil
}
if sfnm, isf := strfmtName(decl.Comments); isf {
typable.Typed("string", sfnm)
return nil
}
sb := &schemaBuilder{ctx: r.ctx, decl: decl}
sb.inferNames()
if err := sb.buildFromType(tpe.Underlying(), typable); err != nil {
return err
}
resp.WithSchema(&schema)
r.postDecls = append(r.postDecls, sb.postDecls...)
return nil
}
return errors.Errorf("responses can only be structs, did you mean for %s to be the response body?", otpe.String())
}
default:
return errors.New("anonymous types are currently not supported for responses")
}
}
func (r *responseBuilder) buildFromStruct(decl *entityDecl, tpe *types.Struct, resp *spec.Response, seen map[string]bool) error {
if tpe.NumFields() == 0 {
return nil
}
for i := 0; i < tpe.NumFields(); i++ {
fld := tpe.Field(i)
if fld.Embedded() {
if err := r.buildFromType(fld.Type(), resp, seen); err != nil {
return err
}
continue
}
if fld.Anonymous() {
debugLog("skipping anonymous field")
continue
}
tg := tpe.Tag(i)
var afld *ast.Field
ans, _ := astutil.PathEnclosingInterval(decl.File, fld.Pos(), fld.Pos())
for _, an := range ans {
at, valid := an.(*ast.Field)
if !valid {
continue
}
debugLog("field %s: %s(%T) [%q] ==> %s", fld.Name(), fld.Type().String(), fld.Type(), tg, at.Doc.Text())
afld = at
break
}
if afld == nil {
debugLog("can't find source associated with %s for %s", fld.String(), tpe.String())
continue
}
// if the field is annotated with swagger:ignore, ignore it
if ignored(afld.Doc) {
continue
}
name, ignore, _, err := parseJSONTag(afld)
if err != nil {
return err
}
if ignore {
continue
}
var in string
// scan for param location first; this changes some behavior down the line
if afld.Doc != nil {
for _, cmt := range afld.Doc.List {
for _, line := range strings.Split(cmt.Text, "\n") {
matches := rxIn.FindStringSubmatch(line)
if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
in = strings.TrimSpace(matches[1])
}
}
}
}
ps := resp.Headers[name]
// support swagger:file for response
// An API operation can return a file, such as an image or PDF. In this case,
// define the response schema with type: file and specify the appropriate MIME types in the produces section.
if afld.Doc != nil && fileParam(afld.Doc) {
resp.Schema = &spec.Schema{}
resp.Schema.Typed("file", "")
} else if err := r.buildFromField(fld, fld.Type(), responseTypable{in, &ps, resp}, seen); err != nil {
return err
}
if strfmtName, ok := strfmtName(afld.Doc); ok {
ps.Typed("string", strfmtName)
}
sp := new(sectionedParser)
sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
sp.taggers = []tagParser{
newSingleLineTagParser("maximum", &setMaximum{headerValidations{&ps}, rxf(rxMaximumFmt, "")}),
newSingleLineTagParser("minimum", &setMinimum{headerValidations{&ps}, rxf(rxMinimumFmt, "")}),
newSingleLineTagParser("multipleOf", &setMultipleOf{headerValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
newSingleLineTagParser("minLength", &setMinLength{headerValidations{&ps}, rxf(rxMinLengthFmt, "")}),
newSingleLineTagParser("maxLength", &setMaxLength{headerValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
newSingleLineTagParser("pattern", &setPattern{headerValidations{&ps}, rxf(rxPatternFmt, "")}),
newSingleLineTagParser("collectionFormat", &setCollectionFormat{headerValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
newSingleLineTagParser("minItems", &setMinItems{headerValidations{&ps}, rxf(rxMinItemsFmt, "")}),
newSingleLineTagParser("maxItems", &setMaxItems{headerValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
newSingleLineTagParser("unique", &setUnique{headerValidations{&ps}, rxf(rxUniqueFmt, "")}),
newSingleLineTagParser("enum", &setEnum{headerValidations{&ps}, rxf(rxEnumFmt, "")}),
newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxDefaultFmt, "")}),
newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxExampleFmt, "")}),
}
itemsTaggers := func(items *spec.Items, level int) []tagParser {
// the expression is 1-indexed, not 0-indexed
itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
return []tagParser{
newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
}
}
var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
if items == nil {
return []tagParser{}, nil
}
switch iftpe := expr.(type) {
case *ast.ArrayType:
eleTaggers := itemsTaggers(items, level)
sp.taggers = append(eleTaggers, sp.taggers...)
otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
if err != nil {
return nil, err
}
return otherTaggers, nil
case *ast.Ident:
taggers := []tagParser{}
if iftpe.Obj == nil {
taggers = itemsTaggers(items, level)
}
otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
if err != nil {
return nil, err
}
return append(taggers, otherTaggers...), nil
case *ast.StarExpr:
otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
if err != nil {
return nil, err
}
return otherTaggers, nil
default:
return nil, fmt.Errorf("unknown field type ele for %q", name)
}
}
// check if this is a primitive, if so parse the validations from the
// doc comments of the slice declaration.
if ftped, ok := afld.Type.(*ast.ArrayType); ok {
taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
if err != nil {
return err
}
sp.taggers = append(taggers, sp.taggers...)
}
if err := sp.Parse(afld.Doc); err != nil {
return err
}
if in != "body" {
seen[name] = true
if resp.Headers == nil {
resp.Headers = make(map[string]spec.Header)
}
resp.Headers[name] = ps
}
}
for k := range resp.Headers {
if !seen[k] {
delete(resp.Headers, k)
}
}
return nil
}

View file

@ -0,0 +1,248 @@
package codescan
import (
"errors"
"strconv"
"strings"
"github.com/go-openapi/spec"
)
const (
// ParamDescriptionKey indicates the tag used to define a parameter description in swagger:route
ParamDescriptionKey = "description"
// ParamNameKey indicates the tag used to define a parameter name in swagger:route
ParamNameKey = "name"
// ParamInKey indicates the tag used to define a parameter location in swagger:route
ParamInKey = "in"
// ParamRequiredKey indicates the tag used to declare whether a parameter is required in swagger:route
ParamRequiredKey = "required"
// ParamTypeKey indicates the tag used to define the parameter type in swagger:route
ParamTypeKey = "type"
// ParamAllowEmptyKey indicates the tag used to indicate whether a parameter allows empty values in swagger:route
ParamAllowEmptyKey = "allowempty"
// SchemaMinKey indicates the tag used to indicate the minimum value allowed for this type in swagger:route
SchemaMinKey = "min"
// SchemaMaxKey indicates the tag used to indicate the maximum value allowed for this type in swagger:route
SchemaMaxKey = "max"
// SchemaEnumKey indicates the tag used to specify the allowed values for this type in swagger:route
SchemaEnumKey = "enum"
// SchemaFormatKey indicates the expected format for this field in swagger:route
SchemaFormatKey = "format"
// SchemaDefaultKey indicates the default value for this field in swagger:route
SchemaDefaultKey = "default"
// SchemaMinLenKey indicates the minimum length of this field in swagger:route
SchemaMinLenKey = "minlength"
// SchemaMaxLenKey indicates the maximum length of this field in swagger:route
SchemaMaxLenKey = "maxlength"
// TypeArray is the identifier for an array type in swagger:route
TypeArray = "array"
// TypeNumber is the identifier for a number type in swagger:route
TypeNumber = "number"
// TypeInteger is the identifier for an integer type in swagger:route
TypeInteger = "integer"
// TypeBoolean is the identifier for a boolean type in swagger:route
TypeBoolean = "boolean"
// TypeBool is the identifier for a boolean type in swagger:route
TypeBool = "bool"
// TypeObject is the identifier for an object type in swagger:route
TypeObject = "object"
// TypeString is the identifier for a string type in swagger:route
TypeString = "string"
)
var (
validIn = []string{"path", "query", "header", "body", "form"}
basicTypes = []string{TypeInteger, TypeNumber, TypeString, TypeBoolean, TypeBool, TypeArray}
)
func newSetParams(params []*spec.Parameter, setter func([]*spec.Parameter)) *setOpParams {
return &setOpParams{
set: setter,
parameters: params,
}
}
type setOpParams struct {
set func([]*spec.Parameter)
parameters []*spec.Parameter
}
func (s *setOpParams) Matches(line string) bool {
return rxParameters.MatchString(line)
}
func (s *setOpParams) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
var current *spec.Parameter
var extraData map[string]string
for _, line := range lines {
l := strings.TrimSpace(line)
if strings.HasPrefix(l, "+") {
s.finalizeParam(current, extraData)
current = new(spec.Parameter)
extraData = make(map[string]string)
l = strings.TrimPrefix(l, "+")
}
kv := strings.SplitN(l, ":", 2)
if len(kv) <= 1 {
continue
}
key := strings.ToLower(strings.TrimSpace(kv[0]))
value := strings.TrimSpace(kv[1])
if current == nil {
return errors.New("invalid route/operation schema provided")
}
switch key {
case ParamDescriptionKey:
current.Description = value
case ParamNameKey:
current.Name = value
case ParamInKey:
v := strings.ToLower(value)
if contains(validIn, v) {
current.In = v
}
case ParamRequiredKey:
if v, err := strconv.ParseBool(value); err == nil {
current.Required = v
}
case ParamTypeKey:
if current.Schema == nil {
current.Schema = new(spec.Schema)
}
if contains(basicTypes, value) {
current.Type = strings.ToLower(value)
if current.Type == TypeBool {
current.Type = TypeBoolean
}
} else if ref, err := spec.NewRef("#/definitions/" + value); err == nil {
current.Type = TypeObject
current.Schema.Ref = ref
}
current.Schema.Type = spec.StringOrArray{current.Type}
case ParamAllowEmptyKey:
if v, err := strconv.ParseBool(value); err == nil {
current.AllowEmptyValue = v
}
default:
extraData[key] = value
}
}
s.finalizeParam(current, extraData)
s.set(s.parameters)
return nil
}
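// Illustrative sketch of the input Parse handles: the Parameters section of a
// swagger:route comment, where a line starting with "+" opens a new parameter and
// the following "key: value" lines fill it in. The names and values below are
// hypothetical:
//
//   + name: id
//     in: path
//     description: identifier of the item to fetch
//     required: true
//     type: integer
//     format: int64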
func (s *setOpParams) finalizeParam(param *spec.Parameter, data map[string]string) {
if param == nil {
return
}
processSchema(data, param)
s.parameters = append(s.parameters, param)
}
func processSchema(data map[string]string, param *spec.Parameter) {
if param.Schema == nil {
return
}
var enumValues []string
for key, value := range data {
switch key {
case SchemaMinKey:
if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
v, _ := strconv.ParseFloat(value, 64)
param.Schema.Minimum = &v
}
case SchemaMaxKey:
if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
v, _ := strconv.ParseFloat(value, 64)
param.Schema.Maximum = &v
}
case SchemaMinLenKey:
if getType(param.Schema) == TypeArray {
v, _ := strconv.ParseInt(value, 10, 64)
param.Schema.MinLength = &v
}
case SchemaMaxLenKey:
if getType(param.Schema) == TypeArray {
v, _ := strconv.ParseInt(value, 10, 64)
param.Schema.MaxLength = &v
}
case SchemaEnumKey:
enumValues = strings.Split(value, ",")
case SchemaFormatKey:
param.Schema.Format = value
case SchemaDefaultKey:
param.Schema.Default = convert(param.Type, value)
}
}
if param.Description != "" {
param.Schema.Description = param.Description
}
convertEnum(param.Schema, enumValues)
}
func convertEnum(schema *spec.Schema, enumValues []string) {
if len(enumValues) == 0 {
return
}
var finalEnum []interface{}
for _, v := range enumValues {
finalEnum = append(finalEnum, convert(schema.Type[0], strings.TrimSpace(v)))
}
schema.Enum = finalEnum
}
func convert(typeStr, valueStr string) interface{} {
switch typeStr {
case TypeInteger:
fallthrough
case TypeNumber:
if num, err := strconv.ParseFloat(valueStr, 64); err == nil {
return num
}
case TypeBoolean:
fallthrough
case TypeBool:
if b, err := strconv.ParseBool(valueStr); err == nil {
return b
}
}
return valueStr
}
func getType(schema *spec.Schema) string {
if len(schema.Type) == 0 {
return ""
}
return schema.Type[0]
}
func contains(arr []string, obj string) bool {
for _, v := range arr {
if v == obj {
return true
}
}
return false
}

View file

@ -0,0 +1,84 @@
package codescan
import (
"fmt"
"github.com/go-openapi/spec"
)
func opConsumesSetter(op *spec.Operation) func([]string) {
return func(consumes []string) { op.Consumes = consumes }
}
func opProducesSetter(op *spec.Operation) func([]string) {
return func(produces []string) { op.Produces = produces }
}
func opSchemeSetter(op *spec.Operation) func([]string) {
return func(schemes []string) { op.Schemes = schemes }
}
func opSecurityDefsSetter(op *spec.Operation) func([]map[string][]string) {
return func(securityDefs []map[string][]string) { op.Security = securityDefs }
}
func opResponsesSetter(op *spec.Operation) func(*spec.Response, map[int]spec.Response) {
return func(def *spec.Response, scr map[int]spec.Response) {
if op.Responses == nil {
op.Responses = new(spec.Responses)
}
op.Responses.Default = def
op.Responses.StatusCodeResponses = scr
}
}
func opParamSetter(op *spec.Operation) func([]*spec.Parameter) {
return func(params []*spec.Parameter) {
for _, v := range params {
op.AddParam(v)
}
}
}
type routesBuilder struct {
ctx *scanCtx
route parsedPathContent
definitions map[string]spec.Schema
operations map[string]*spec.Operation
responses map[string]spec.Response
parameters []*spec.Parameter
}
func (r *routesBuilder) Build(tgt *spec.Paths) error {
pthObj := tgt.Paths[r.route.Path]
op := setPathOperation(
r.route.Method, r.route.ID,
&pthObj, r.operations[r.route.ID])
op.Tags = r.route.Tags
sp := new(sectionedParser)
sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
sr := newSetResponses(r.definitions, r.responses, opResponsesSetter(op))
spa := newSetParams(r.parameters, opParamSetter(op))
sp.taggers = []tagParser{
newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, opConsumesSetter(op)), false),
newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, opProducesSetter(op)), false),
newSingleLineTagParser("Schemes", newSetSchemes(opSchemeSetter(op))),
newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, opSecurityDefsSetter(op)), false),
newMultiLineTagParser("Parameters", spa, false),
newMultiLineTagParser("Responses", sr, false),
newSingleLineTagParser("Deprecated", &setDeprecatedOp{op}),
}
if err := sp.Parse(r.route.Remaining); err != nil {
return fmt.Errorf("operation (%s): %v", op.ID, err)
}
if tgt.Paths == nil {
tgt.Paths = make(map[string]spec.PathItem)
}
tgt.Paths[r.route.Path] = pthObj
return nil
}
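// For orientation, a hypothetical swagger:route comment of the shape handled by the
// sectionedParser configured above: a summary line, then sections such as Produces,
// Schemes, Security, Parameters, Responses and Deprecated:
//
//   swagger:route GET /items/{id} items getItem
//
//   Gets an item by its id.
//
//       Produces:
//       - application/json
//
//       Responses:
//         200: itemResponse
//         404: notFound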

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,232 @@
package codescan
import (
"github.com/go-openapi/spec"
)
func newSpecBuilder(input *spec.Swagger, sc *scanCtx, scanModels bool) *specBuilder {
if input == nil {
input = new(spec.Swagger)
input.Swagger = "2.0"
}
if input.Paths == nil {
input.Paths = new(spec.Paths)
}
if input.Definitions == nil {
input.Definitions = make(map[string]spec.Schema)
}
if input.Responses == nil {
input.Responses = make(map[string]spec.Response)
}
if input.Extensions == nil {
input.Extensions = make(spec.Extensions)
}
return &specBuilder{
ctx: sc,
input: input,
scanModels: scanModels,
operations: collectOperationsFromInput(input),
definitions: input.Definitions,
responses: input.Responses,
}
}
type specBuilder struct {
scanModels bool
input *spec.Swagger
ctx *scanCtx
discovered []*entityDecl
definitions map[string]spec.Schema
responses map[string]spec.Response
operations map[string]*spec.Operation
}
func (s *specBuilder) Build() (*spec.Swagger, error) {
if err := s.buildModels(); err != nil {
return nil, err
}
if err := s.buildParameters(); err != nil {
return nil, err
}
if err := s.buildResponses(); err != nil {
return nil, err
}
// build definitions dictionary
if err := s.buildDiscovered(); err != nil {
return nil, err
}
if err := s.buildRoutes(); err != nil {
return nil, err
}
if err := s.buildOperations(); err != nil {
return nil, err
}
if err := s.buildMeta(); err != nil {
return nil, err
}
if s.input.Swagger == "" {
s.input.Swagger = "2.0"
}
return s.input, nil
}
func (s *specBuilder) buildDiscovered() error {
// loop over discovered until all the items are in definitions
keepGoing := len(s.discovered) > 0
for keepGoing {
var queue []*entityDecl
for _, d := range s.discovered {
nm, _ := d.Names()
if _, ok := s.definitions[nm]; !ok {
queue = append(queue, d)
}
}
s.discovered = nil
for _, sd := range queue {
if err := s.buildDiscoveredSchema(sd); err != nil {
return err
}
}
keepGoing = len(s.discovered) > 0
}
return nil
}
func (s *specBuilder) buildDiscoveredSchema(decl *entityDecl) error {
sb := &schemaBuilder{
ctx: s.ctx,
decl: decl,
discovered: s.discovered,
}
if err := sb.Build(s.definitions); err != nil {
return err
}
s.discovered = append(s.discovered, sb.postDecls...)
return nil
}
func (s *specBuilder) buildMeta() error {
// build swagger object
for _, decl := range s.ctx.app.Meta {
if err := newMetaParser(s.input).Parse(decl.Comments); err != nil {
return err
}
}
return nil
}
func (s *specBuilder) buildOperations() error {
for _, pp := range s.ctx.app.Operations {
ob := &operationsBuilder{
operations: s.operations,
ctx: s.ctx,
path: pp,
}
if err := ob.Build(s.input.Paths); err != nil {
return err
}
}
return nil
}
func (s *specBuilder) buildRoutes() error {
// build paths dictionary
for _, pp := range s.ctx.app.Routes {
rb := &routesBuilder{
ctx: s.ctx,
route: pp,
responses: s.responses,
operations: s.operations,
definitions: s.definitions,
}
if err := rb.Build(s.input.Paths); err != nil {
return err
}
}
return nil
}
func (s *specBuilder) buildResponses() error {
// build responses dictionary
for _, decl := range s.ctx.app.Responses {
rb := &responseBuilder{
ctx: s.ctx,
decl: decl,
}
if err := rb.Build(s.responses); err != nil {
return err
}
s.discovered = append(s.discovered, rb.postDecls...)
}
return nil
}
func (s *specBuilder) buildParameters() error {
// build parameters dictionary
for _, decl := range s.ctx.app.Parameters {
pb := &parameterBuilder{
ctx: s.ctx,
decl: decl,
}
if err := pb.Build(s.operations); err != nil {
return err
}
s.discovered = append(s.discovered, pb.postDecls...)
}
return nil
}
func (s *specBuilder) buildModels() error {
// build models dictionary
if !s.scanModels {
return nil
}
for _, decl := range s.ctx.app.Models {
if err := s.buildDiscoveredSchema(decl); err != nil {
return err
}
}
return nil
}
func collectOperationsFromInput(input *spec.Swagger) map[string]*spec.Operation {
operations := make(map[string]*spec.Operation)
if input != nil && input.Paths != nil {
for _, pth := range input.Paths.Paths {
if pth.Get != nil {
operations[pth.Get.ID] = pth.Get
}
if pth.Post != nil {
operations[pth.Post.ID] = pth.Post
}
if pth.Put != nil {
operations[pth.Put.ID] = pth.Put
}
if pth.Patch != nil {
operations[pth.Patch.ID] = pth.Patch
}
if pth.Delete != nil {
operations[pth.Delete.ID] = pth.Delete
}
if pth.Head != nil {
operations[pth.Head.ID] = pth.Head
}
if pth.Options != nil {
operations[pth.Options.ID] = pth.Options
}
}
}
return operations
}

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,199 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package generator
import (
"encoding/json"
"errors"
"fmt"
"os"
"path"
"path/filepath"
"sort"
"github.com/go-openapi/analysis"
"github.com/go-openapi/runtime"
"github.com/go-openapi/swag"
)
// GenerateClient generates a client library for a swagger spec document.
func GenerateClient(name string, modelNames, operationIDs []string, opts *GenOpts) error {
templates.LoadDefaults()
if opts == nil {
return errors.New("gen opts are required")
}
if opts.Template != "" {
if err := templates.LoadContrib(opts.Template); err != nil {
return err
}
}
if opts.TemplateDir != "" {
if err := templates.LoadDir(opts.TemplateDir); err != nil {
return err
}
}
if err := opts.CheckOpts(); err != nil {
return err
}
// Load the spec
_, specDoc, err := loadSpec(opts.Spec)
if err != nil {
return err
}
// Validate and Expand. specDoc is in/out param.
specDoc, err = validateAndFlattenSpec(opts, specDoc)
if err != nil {
return err
}
analyzed := analysis.New(specDoc.Spec())
models, err := gatherModels(specDoc, modelNames)
if err != nil {
return err
}
operations := gatherOperations(analyzed, operationIDs)
if len(operations) == 0 {
return errors.New("no operations were selected")
}
defaultScheme := opts.DefaultScheme
if defaultScheme == "" {
defaultScheme = sHTTP
}
defaultConsumes := opts.DefaultConsumes
if defaultConsumes == "" {
defaultConsumes = runtime.JSONMime
}
defaultProduces := opts.DefaultProduces
if defaultProduces == "" {
defaultProduces = runtime.JSONMime
}
generator := appGenerator{
Name: appNameOrDefault(specDoc, name, "rest"),
SpecDoc: specDoc,
Analyzed: analyzed,
Models: models,
Operations: operations,
Target: opts.Target,
DumpData: opts.DumpData,
Package: opts.LanguageOpts.ManglePackageName(opts.ClientPackage, "client"),
APIPackage: opts.LanguageOpts.ManglePackagePath(opts.APIPackage, "api"),
ModelsPackage: opts.LanguageOpts.ManglePackagePath(opts.ModelPackage, "definitions"),
ServerPackage: opts.LanguageOpts.ManglePackagePath(opts.ServerPackage, "server"),
ClientPackage: opts.LanguageOpts.ManglePackagePath(opts.ClientPackage, "client"),
OperationsPackage: opts.LanguageOpts.ManglePackagePath(opts.ClientPackage, "client"),
Principal: opts.Principal,
DefaultScheme: defaultScheme,
DefaultProduces: defaultProduces,
DefaultConsumes: defaultConsumes,
GenOpts: opts,
}
generator.Receiver = "o"
return (&clientGenerator{generator}).Generate()
}
type clientGenerator struct {
appGenerator
}
func (c *clientGenerator) Generate() error {
app, err := c.makeCodegenApp()
if app.Name == "" {
app.Name = "APIClient"
}
baseImport := c.GenOpts.LanguageOpts.baseImport(c.Target)
if c.GenOpts.ExistingModels == "" {
if app.Imports == nil {
app.Imports = make(map[string]string)
}
pkgAlias := c.GenOpts.LanguageOpts.ManglePackageName(c.ModelsPackage, "models")
app.Imports[pkgAlias] = path.Join(
filepath.ToSlash(baseImport),
c.GenOpts.LanguageOpts.ManglePackagePath(c.GenOpts.ModelPackage, "models"))
} else {
app.DefaultImports = append(app.DefaultImports, c.GenOpts.LanguageOpts.ManglePackagePath(c.GenOpts.ExistingModels, ""))
}
if err != nil {
return err
}
if c.DumpData {
bb, _ := json.MarshalIndent(swag.ToDynamicJSON(app), "", " ")
fmt.Fprintln(os.Stdout, string(bb))
return nil
}
if c.GenOpts.IncludeModel {
for _, mod := range app.Models {
modCopy := mod
modCopy.IncludeValidator = true
if !mod.IsStream {
if err := c.GenOpts.renderDefinition(&modCopy); err != nil {
return err
}
}
}
}
if c.GenOpts.IncludeHandler {
sort.Sort(app.OperationGroups)
for i := range app.OperationGroups {
opGroup := app.OperationGroups[i]
opGroup.DefaultImports = app.DefaultImports
opGroup.RootPackage = c.ClientPackage
opGroup.GenOpts = c.GenOpts
app.OperationGroups[i] = opGroup
sort.Sort(opGroup.Operations)
for _, op := range opGroup.Operations {
opCopy := op
if opCopy.Package == "" {
opCopy.Package = c.Package
}
if err := c.GenOpts.renderOperation(&opCopy); err != nil {
return err
}
}
app.DefaultImports = append(app.DefaultImports,
path.Join(
filepath.ToSlash(baseImport),
c.GenOpts.LanguageOpts.ManglePackagePath(c.ClientPackage, "client"),
opGroup.Name))
if err := c.GenOpts.renderOperationGroup(&opGroup); err != nil {
return err
}
}
}
if c.GenOpts.IncludeSupport {
if err := c.GenOpts.renderApplication(&app); err != nil {
return err
}
}
return nil
}

View file

@ -0,0 +1,61 @@
package generator
import (
"fmt"
"os"
"path/filepath"
"github.com/spf13/viper"
)
// LanguageDefinition in the configuration file.
type LanguageDefinition struct {
Layout SectionOpts `mapstructure:"layout"`
}
// ConfigureOpts for generation
func (d *LanguageDefinition) ConfigureOpts(opts *GenOpts) error {
opts.Sections = d.Layout
if opts.LanguageOpts == nil {
opts.LanguageOpts = GoLangOpts()
}
return nil
}
// LanguageConfig structure that is obtained from parsing a config file
type LanguageConfig map[string]LanguageDefinition
// ReadConfig reads the configuration at the specified path; when no path is specified
// it looks in the current directory and loads a .swagger.{yml,json,hcl,toml,properties} file.
// It returns a viper config or an error.
func ReadConfig(fpath string) (*viper.Viper, error) {
v := viper.New()
if fpath != "" {
if !fileExists(fpath, "") {
return nil, fmt.Errorf("can't find file for %q", fpath)
}
file, err := os.Open(fpath)
if err != nil {
return nil, err
}
defer func() { _ = file.Close() }()
ext := filepath.Ext(fpath)
if len(ext) > 0 {
ext = ext[1:]
}
v.SetConfigType(ext)
if err := v.ReadConfig(file); err != nil {
return nil, err
}
return v, nil
}
v.SetConfigName(".swagger")
v.AddConfigPath(".")
if err := v.ReadInConfig(); err != nil {
if _, ok := err.(viper.UnsupportedConfigError); !ok && v.ConfigFileUsed() != "" {
return nil, err
}
}
return v, nil
}
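// A minimal usage sketch; the file name and the "go" section below are hypothetical,
// and an empty path makes ReadConfig fall back to ./.swagger.* instead:
//
//   v, err := ReadConfig("swagger-gen.yaml")
//   if err != nil {
//       log.Fatalln(err)
//   }
//   var cfg LanguageConfig
//   if err := v.Unmarshal(&cfg); err != nil {
//       log.Fatalln(err)
//   }
//   def := cfg["go"]
//   _ = def.ConfigureOpts(new(GenOpts))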

View file

@ -0,0 +1,68 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package generator
import (
"encoding/json"
"fmt"
"log"
"os"
"path/filepath"
"runtime"
)
var (
// Debug is true when the env var DEBUG or SWAGGER_DEBUG is not empty;
// in that case the generators will be very noisy about what they are doing
Debug = os.Getenv("DEBUG") != "" || os.Getenv("SWAGGER_DEBUG") != ""
// generatorLogger is a debug logger for this package
generatorLogger *log.Logger
)
func init() {
debugOptions()
}
func debugOptions() {
generatorLogger = log.New(os.Stdout, "generator:", log.LstdFlags)
}
// debugLog wraps log.Printf with a debug-specific logger
func debugLog(frmt string, args ...interface{}) {
if Debug {
_, file, pos, _ := runtime.Caller(1)
generatorLogger.Printf("%s:%d: %s", filepath.Base(file), pos,
fmt.Sprintf(frmt, args...))
}
}
// debugLogAsJSON marshals its last arg as pretty JSON
func debugLogAsJSON(frmt string, args ...interface{}) {
if Debug {
var dfrmt string
_, file, pos, _ := runtime.Caller(1)
dargs := make([]interface{}, 0, len(args)+2)
dargs = append(dargs, filepath.Base(file), pos)
if len(args) > 0 {
dfrmt = "%s:%d: " + frmt + "\n%s"
bbb, _ := json.MarshalIndent(args[len(args)-1], "", " ")
dargs = append(dargs, args[0:len(args)-1]...)
dargs = append(dargs, string(bbb))
} else {
dfrmt = "%s:%d: " + frmt
}
generatorLogger.Printf(dfrmt, dargs...)
}
}

View file

@ -0,0 +1,75 @@
package generator
import (
"github.com/go-openapi/analysis"
"github.com/go-openapi/spec"
"github.com/go-openapi/swag"
)
type discInfo struct {
Discriminators map[string]discor
Discriminated map[string]discee
}
type discor struct {
FieldName string `json:"fieldName"`
GoType string `json:"goType"`
JSONName string `json:"jsonName"`
Children []discee `json:"children"`
}
type discee struct {
FieldName string `json:"fieldName"`
FieldValue string `json:"fieldValue"`
GoType string `json:"goType"`
JSONName string `json:"jsonName"`
Ref spec.Ref `json:"ref"`
ParentRef spec.Ref `json:"parentRef"`
}
func discriminatorInfo(doc *analysis.Spec) *discInfo {
baseTypes := make(map[string]discor)
for _, sch := range doc.AllDefinitions() {
if sch.Schema.Discriminator != "" {
tpe, _ := sch.Schema.Extensions.GetString(xGoName)
if tpe == "" {
tpe = swag.ToGoName(sch.Name)
}
baseTypes[sch.Ref.String()] = discor{
FieldName: sch.Schema.Discriminator,
GoType: tpe,
JSONName: sch.Name,
}
}
}
subTypes := make(map[string]discee)
for _, sch := range doc.SchemasWithAllOf() {
for _, ao := range sch.Schema.AllOf {
if ao.Ref.String() != "" {
if bt, ok := baseTypes[ao.Ref.String()]; ok {
name, _ := sch.Schema.Extensions.GetString(xClass)
if name == "" {
name = sch.Name
}
tpe, _ := sch.Schema.Extensions.GetString(xGoName)
if tpe == "" {
tpe = swag.ToGoName(sch.Name)
}
dce := discee{
FieldName: bt.FieldName,
FieldValue: name,
Ref: sch.Ref,
ParentRef: ao.Ref,
JSONName: sch.Name,
GoType: tpe,
}
subTypes[sch.Ref.String()] = dce
bt.Children = append(bt.Children, dce)
baseTypes[ao.Ref.String()] = bt
}
}
}
}
return &discInfo{Discriminators: baseTypes, Discriminated: subTypes}
}

View file

@ -0,0 +1,77 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*Package generator provides the code generation library for go-swagger.
Generating data types
The general idea is that you should rarely see interface{} in the generated code.
You get a complete representation of a swagger document in somewhat idiomatic go.
To do so, there is a set of mapping patterns that are applied,
to map a Swagger specification to go types:
definition of primitive => type alias/name
definition of array => type alias/name
definition of map => type alias/name
definition of object
with properties => struct
definition of $ref => type alias/name
object with only
additional properties => map[string]T
object with additional
properties and properties => custom serializer
schema with schema array
in items => tuple (struct with properties, custom serializer)
schema with all of => struct
* allOf schema with $ref => embedded value
* allOf schema with properties => properties are included in struct
* adding an allOf schema with just "x-isnullable": true or
"x-nullable": true turns the schema into a pointer when
there are only other extension properties provided
NOTE: anyOf and oneOf JSON-schema constructs are not supported by Swagger 2.0
A property on a definition is a pointer when any one of the following conditions is met:
it is an object schema (struct)
it has x-nullable or x-isnullable as vendor extension
it is a primitive where the zero value is valid but would fail validation
otherwise strings minLength > 0 or required results in non-pointer
numbers min > 0, max < 0 and min < max
JSONSchema and by extension Swagger allow for items that have a fixed size array,
with the schema describing the items at each index. This can be combined with additional items
to form some kind of tuple with varargs.
To map this to go it creates a struct that has fixed names and a custom json serializer.
NOTE: the additionalItems keyword is not supported by Swagger 2.0. However, the generator and validator parts
in go-swagger do.
Documenting the generated code
The code that is generated also gets the doc comments that are used by the scanner
to generate a spec from go code. So that after generation you should be able to reverse
generate a spec from the code that was generated by your spec.
It should be equivalent to the original spec but might miss some default values and examples. */
package generator
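// As a hedged illustration of the pointer rules above, the schema fragment and the
// generated struct below are hypothetical and not taken from a real spec:
//
//   definitions:
//     Widget:
//       type: object
//       properties:
//         label:
//           type: string
//         count:
//           type: integer
//           format: int32
//           x-nullable: true
//
// would typically map to something along the lines of:
//
//   type Widget struct {
//       // plain string: the zero value is valid and passes validation, so no pointer
//       Label string `json:"label,omitempty"`
//
//       // x-nullable turns the property into a pointer
//       Count *int32 `json:"count,omitempty"`
//   }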

View file

@ -0,0 +1,226 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package generator
// TODO: we may probably find a way to register most of this dynamically from strfmt
// map of function calls to be generated to get the zero value of a given type
var zeroes = map[string]string{
"bool": "false",
"float32": "0",
"float64": "0",
"int": "0",
"int8": "0",
"int16": "0",
"int32": "0",
"int64": "0",
"string": "\"\"",
"uint": "0",
"uint8": "0",
"uint16": "0",
"uint32": "0",
"uint64": "0",
// Extended formats (23 formats corresponding to the Default registry
// provided by go-openapi/strfmt)
"strfmt.Base64": "strfmt.Base64([]byte(nil))",
"strfmt.CreditCard": "strfmt.CreditCard(\"\")",
"strfmt.Date": "strfmt.Date{}",
"strfmt.DateTime": "strfmt.DateTime{}",
"strfmt.Duration": "strfmt.Duration(0)",
"strfmt.Email": "strfmt.Email(\"\")",
"strfmt.HexColor": "strfmt.HexColor(\"#000000\")",
"strfmt.Hostname": "strfmt.Hostname(\"\")",
"strfmt.IPv4": "strfmt.IPv4(\"\")",
"strfmt.IPv6": "strfmt.IPv6(\"\")",
"strfmt.ISBN": "strfmt.ISBN(\"\")",
"strfmt.ISBN10": "strfmt.ISBN10(\"\")",
"strfmt.ISBN13": "strfmt.ISBN13(\"\")",
"strfmt.MAC": "strfmt.MAC(\"\")",
"strfmt.ObjectId": "strfmt.ObjectId{}",
"strfmt.Password": "strfmt.Password(\"\")",
"strfmt.RGBColor": "strfmt.RGBColor(\"rgb(0,0,0)\")",
"strfmt.SSN": "strfmt.SSN(\"\")",
"strfmt.URI": "strfmt.URI(\"\")",
"strfmt.UUID": "strfmt.UUID(\"\")",
"strfmt.UUID3": "strfmt.UUID3(\"\")",
"strfmt.UUID4": "strfmt.UUID4(\"\")",
"strfmt.UUID5": "strfmt.UUID5(\"\")",
//"file": "runtime.File",
}
// conversion functions from string representation to a numerical or boolean
// primitive type
var stringConverters = map[string]string{
"bool": "swag.ConvertBool",
"float32": "swag.ConvertFloat32",
"float64": "swag.ConvertFloat64",
"int8": "swag.ConvertInt8",
"int16": "swag.ConvertInt16",
"int32": "swag.ConvertInt32",
"int64": "swag.ConvertInt64",
"uint8": "swag.ConvertUint8",
"uint16": "swag.ConvertUint16",
"uint32": "swag.ConvertUint32",
"uint64": "swag.ConvertUint64",
}
// formatting (string representation) functions from a native representation
// of a numerical or boolean primitive type
var stringFormatters = map[string]string{
"bool": "swag.FormatBool",
"float32": "swag.FormatFloat32",
"float64": "swag.FormatFloat64",
"int8": "swag.FormatInt8",
"int16": "swag.FormatInt16",
"int32": "swag.FormatInt32",
"int64": "swag.FormatInt64",
"uint8": "swag.FormatUint8",
"uint16": "swag.FormatUint16",
"uint32": "swag.FormatUint32",
"uint64": "swag.FormatUint64",
}
// typeMapping contains a mapping of type name to go type
var typeMapping = map[string]string{
// Standard formats with native, straightforward, mapping
"string": "string",
"boolean": "bool",
"integer": "int64",
"number": "float64",
// For file producers
"file": "runtime.File",
}
// formatMapping contains a type-specific version of mapping of format to go type
var formatMapping = map[string]map[string]string{
"number": {
"double": "float64",
"float": "float32",
"int": "int64",
"int8": "int8",
"int16": "int16",
"int32": "int32",
"int64": "int64",
"uint": "uint64",
"uint8": "uint8",
"uint16": "uint16",
"uint32": "uint32",
"uint64": "uint64",
},
"integer": {
"int": "int64",
"int8": "int8",
"int16": "int16",
"int32": "int32",
"int64": "int64",
"uint": "uint64",
"uint8": "uint8",
"uint16": "uint16",
"uint32": "uint32",
"uint64": "uint64",
},
"string": {
"char": "rune",
// Extended format registry from go-openapi/strfmt.
// Currently, 23 such formats are supported (default strfmt registry),
// plus the following aliases:
// - "datetime" alias for the more official "date-time"
// - "objectid" and "ObjectId" aliases for "bsonobjectid"
"binary": "io.ReadCloser",
"byte": "strfmt.Base64",
"creditcard": "strfmt.CreditCard",
"date": "strfmt.Date",
"date-time": "strfmt.DateTime",
"datetime": "strfmt.DateTime",
"duration": "strfmt.Duration",
"email": "strfmt.Email",
"hexcolor": "strfmt.HexColor",
"hostname": "strfmt.Hostname",
"ipv4": "strfmt.IPv4",
"ipv6": "strfmt.IPv6",
"isbn": "strfmt.ISBN",
"isbn10": "strfmt.ISBN10",
"isbn13": "strfmt.ISBN13",
"mac": "strfmt.MAC",
"bsonobjectid": "strfmt.ObjectId",
"objectid": "strfmt.ObjectId",
"ObjectId": "strfmt.ObjectId", // NOTE: does it work with uppercase?
"password": "strfmt.Password",
"rgbcolor": "strfmt.RGBColor",
"ssn": "strfmt.SSN",
"uri": "strfmt.URI",
"uuid": "strfmt.UUID",
"uuid3": "strfmt.UUID3",
"uuid4": "strfmt.UUID4",
"uuid5": "strfmt.UUID5",
// For file producers
"file": "runtime.File",
},
}
// go primitive types
var primitives = map[string]struct{}{
"bool": {},
"byte": {},
"[]byte": {},
"complex64": {},
"complex128": {},
"float32": {},
"float64": {},
"int": {},
"int8": {},
"int16": {},
"int32": {},
"int64": {},
"rune": {},
"string": {},
"uint": {},
"uint8": {},
"uint16": {},
"uint32": {},
"uint64": {},
}
// Formats with a custom formatter.
// Currently, 23 such formats are supported
var customFormatters = map[string]struct{}{
"strfmt.Base64": {},
"strfmt.CreditCard": {},
"strfmt.Date": {},
"strfmt.DateTime": {},
"strfmt.Duration": {},
"strfmt.Email": {},
"strfmt.HexColor": {},
"strfmt.Hostname": {},
"strfmt.IPv4": {},
"strfmt.IPv6": {},
"strfmt.ISBN": {},
"strfmt.ISBN10": {},
"strfmt.ISBN13": {},
"strfmt.MAC": {},
"strfmt.ObjectId": {},
"strfmt.Password": {},
"strfmt.RGBColor": {},
"strfmt.SSN": {},
"strfmt.URI": {},
"strfmt.UUID": {},
"strfmt.UUID3": {},
"strfmt.UUID4": {},
"strfmt.UUID5": {},
// the following interfaces do not generate validations
"io.ReadCloser": {}, // for "format": "binary" (server side)
"io.Writer": {}, // for "format": "binary" (client side)
// NOTE: runtime.File is not a customFormatter
}

View file

@ -0,0 +1,2 @@
go-bindata -debug -pkg=generator -ignore='.*\.sw?' -ignore='.*\.md' ./templates/...
echo regenerated

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,633 @@
package generator
import (
"bytes"
"encoding/json"
"fmt"
"sort"
"strconv"
"strings"
"github.com/go-openapi/spec"
)
// GenCommon contains common properties needed across
// definitions, app and operations
// TargetImportPath may be used by templates to import other (possibly
// generated) packages in the generation path (e.g. relative to GOPATH).
// TargetImportPath is NOT used by standard templates.
type GenCommon struct {
Copyright string
TargetImportPath string
}
// GenDefinition contains all the properties to generate a
// definition from a swagger spec
type GenDefinition struct {
GenCommon
GenSchema
Package string
Imports map[string]string
DefaultImports []string
ExtraSchemas GenSchemaList
DependsOn []string
External bool
}
// GenDefinitions represents a list of definitions to generate
// this implements a sort by definition name
type GenDefinitions []GenDefinition
func (g GenDefinitions) Len() int { return len(g) }
func (g GenDefinitions) Less(i, j int) bool { return g[i].Name < g[j].Name }
func (g GenDefinitions) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
// GenSchemaList is a list of schemas for generation.
//
// It can be sorted by name to get a stable struct layout for
// version control and such
type GenSchemaList []GenSchema
// GenSchema contains all the information needed to generate the code
// for a schema
type GenSchema struct {
resolvedType
sharedValidations
Example string
OriginalName string
Name string
Suffix string
Path string
ValueExpression string
IndexVar string
KeyVar string
Title string
Description string
Location string
ReceiverName string
Items *GenSchema
AllowsAdditionalItems bool
HasAdditionalItems bool
AdditionalItems *GenSchema
Object *GenSchema
XMLName string
CustomTag string
Properties GenSchemaList
AllOf GenSchemaList
HasAdditionalProperties bool
IsAdditionalProperties bool
AdditionalProperties *GenSchema
StrictAdditionalProperties bool
ReadOnly bool
IsVirtual bool
IsBaseType bool
HasBaseType bool
IsSubType bool
IsExported bool
DiscriminatorField string
DiscriminatorValue string
Discriminates map[string]string
Parents []string
IncludeValidator bool
IncludeModel bool
Default interface{}
}
func (g GenSchemaList) Len() int { return len(g) }
func (g GenSchemaList) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
func (g GenSchemaList) Less(i, j int) bool {
a, okA := g[i].Extensions[xOrder].(float64)
b, okB := g[j].Extensions[xOrder].(float64)
// If both properties have x-order defined, then the one with lower x-order is smaller
if okA && okB {
return a < b
}
// If only the first property has x-order defined, then it is smaller
if okA {
return true
}
// If only the second property has x-order defined, then it is smaller
if okB {
return false
}
// If neither property has x-order defined, then the one with lower lexicographic name is smaller
return g[i].Name < g[j].Name
}
type sharedValidations struct {
HasValidations bool
Required bool
// String validations
MaxLength *int64
MinLength *int64
Pattern string
// Number validations
MultipleOf *float64
Minimum *float64
Maximum *float64
ExclusiveMinimum bool
ExclusiveMaximum bool
Enum []interface{}
ItemsEnum []interface{}
// Slice validations
MinItems *int64
MaxItems *int64
UniqueItems bool
HasSliceValidations bool
// Not used yet (perhaps intended for maxProperties, minProperties validations?)
NeedsSize bool
// NOTE: "patternProperties" and "dependencies" not supported by Swagger 2.0
}
// GenResponse represents a response object for code generation
type GenResponse struct {
Package string
ModelsPackage string
ReceiverName string
Name string
Description string
IsSuccess bool
Code int
Method string
Path string
Headers GenHeaders
Schema *GenSchema
AllowsForStreaming bool
Imports map[string]string
DefaultImports []string
Extensions map[string]interface{}
}
// GenHeader represents a header on a response for code generation
type GenHeader struct {
resolvedType
sharedValidations
Package string
ReceiverName string
IndexVar string
ID string
Name string
Path string
ValueExpression string
Title string
Description string
Default interface{}
HasDefault bool
CollectionFormat string
Child *GenItems
Parent *GenItems
Converter string
Formatter string
ZeroValue string
}
// ItemsDepth returns a string "items.items..." with as many items as the level of nesting of the array.
// For header objects it always returns "".
func (g *GenHeader) ItemsDepth() string {
// NOTE: this is currently used by templates to generate explicit comments in nested structures
return ""
}
// GenHeaders is a sorted collection of headers for codegen
type GenHeaders []GenHeader
func (g GenHeaders) Len() int { return len(g) }
func (g GenHeaders) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
func (g GenHeaders) Less(i, j int) bool { return g[i].Name < g[j].Name }
// HasSomeDefaults returns true if at least one header has a default value set
func (g GenHeaders) HasSomeDefaults() bool {
// NOTE: this is currently used by templates to avoid empty constructs
for _, header := range g {
if header.HasDefault {
return true
}
}
return false
}
// GenParameter is used to represent
// a parameter or a header for code generation.
type GenParameter struct {
resolvedType
sharedValidations
ID string
Name string
ModelsPackage string
Path string
ValueExpression string
IndexVar string
KeyVar string
ReceiverName string
Location string
Title string
Description string
Converter string
Formatter string
Schema *GenSchema
CollectionFormat string
Child *GenItems
Parent *GenItems
/// Unused
//BodyParam *GenParameter
Default interface{}
HasDefault bool
ZeroValue string
AllowEmptyValue bool
// validation strategy for Body params, which may mix model and simple constructs.
// Distinguish the following cases:
// - HasSimpleBodyParams: body is an inline simple type
// - HasModelBodyParams: body is a model object
// - HasSimpleBodyItems: body is an inline array of simple type
// - HasModelBodyItems: body is an array of model objects
// - HasSimpleBodyMap: body is a map of simple objects (possibly arrays)
// - HasModelBodyMap: body is a map of model objects
HasSimpleBodyParams bool
HasModelBodyParams bool
HasSimpleBodyItems bool
HasModelBodyItems bool
HasSimpleBodyMap bool
HasModelBodyMap bool
Extensions map[string]interface{}
}
// IsQueryParam returns true when this parameter is a query param
func (g *GenParameter) IsQueryParam() bool {
return g.Location == "query"
}
// IsPathParam returns true when this parameter is a path param
func (g *GenParameter) IsPathParam() bool {
return g.Location == "path"
}
// IsFormParam returns true when this parameter is a form param
func (g *GenParameter) IsFormParam() bool {
return g.Location == "formData"
}
// IsHeaderParam returns true when this parameter is a header param
func (g *GenParameter) IsHeaderParam() bool {
return g.Location == "header"
}
// IsBodyParam returns true when this parameter is a body param
func (g *GenParameter) IsBodyParam() bool {
return g.Location == "body"
}
// IsFileParam returns true when this parameter is a file param
func (g *GenParameter) IsFileParam() bool {
return g.SwaggerType == "file"
}
// ItemsDepth returns a string "items.items..." with as many items as the level of nesting of the array.
// For a parameter object, it always returns "".
func (g *GenParameter) ItemsDepth() string {
// NOTE: this is currently used by templates to generate explicit comments in nested structures
return ""
}
// GenParameters represents a sorted parameter collection
type GenParameters []GenParameter
func (g GenParameters) Len() int { return len(g) }
func (g GenParameters) Less(i, j int) bool { return g[i].Name < g[j].Name }
func (g GenParameters) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
// HasSomeDefaults returns true if at least one parameter has a default value set
func (g GenParameters) HasSomeDefaults() bool {
// NOTE: this is currently used by templates to avoid empty constructs
for _, param := range g {
if param.HasDefault {
return true
}
}
return false
}
// GenItems represents the collection items for a collection parameter
type GenItems struct {
sharedValidations
resolvedType
Name string
Path string
ValueExpression string
CollectionFormat string
Child *GenItems
Parent *GenItems
Converter string
Formatter string
Location string
IndexVar string
KeyVar string
// instructs generator to skip the splitting and parsing from CollectionFormat
SkipParse bool
}
// ItemsDepth returns a string "items.items..." with as many items as the level of nesting of the array.
func (g *GenItems) ItemsDepth() string {
// NOTE: this is currently used by templates to generate explicit comments in nested structures
current := g
i := 1
for current.Parent != nil {
i++
current = current.Parent
}
return strings.Repeat("items.", i)
}
// GenOperationGroup represents a named (tagged) group of operations
type GenOperationGroup struct {
GenCommon
Name string
Operations GenOperations
Summary string
Description string
Imports map[string]string
DefaultImports []string
RootPackage string
GenOpts *GenOpts
}
// GenOperationGroups is a sorted collection of operation groups
type GenOperationGroups []GenOperationGroup
func (g GenOperationGroups) Len() int { return len(g) }
func (g GenOperationGroups) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
func (g GenOperationGroups) Less(i, j int) bool { return g[i].Name < g[j].Name }
// GenStatusCodeResponses a container for status code responses
type GenStatusCodeResponses []GenResponse
func (g GenStatusCodeResponses) Len() int { return len(g) }
func (g GenStatusCodeResponses) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
func (g GenStatusCodeResponses) Less(i, j int) bool { return g[i].Code < g[j].Code }
// MarshalJSON marshals these responses to json
func (g GenStatusCodeResponses) MarshalJSON() ([]byte, error) {
if g == nil {
return nil, nil
}
var buf bytes.Buffer
buf.WriteRune('{')
for i, v := range g {
rb, err := json.Marshal(v)
if err != nil {
return nil, err
}
if i > 0 {
buf.WriteRune(',')
}
buf.WriteString(fmt.Sprintf("%q:", strconv.Itoa(v.Code)))
buf.Write(rb)
}
buf.WriteRune('}')
return buf.Bytes(), nil
}
// UnmarshalJSON unmarshals this GenStatusCodeResponses from json
func (g *GenStatusCodeResponses) UnmarshalJSON(data []byte) error {
var dd map[string]GenResponse
if err := json.Unmarshal(data, &dd); err != nil {
return err
}
var gg GenStatusCodeResponses
for _, v := range dd {
gg = append(gg, v)
}
sort.Sort(gg)
*g = gg
return nil
}
// GenOperation represents an operation for code generation
type GenOperation struct {
GenCommon
Package string
ReceiverName string
Name string
Summary string
Description string
Method string
Path string
BasePath string
Tags []string
RootPackage string
Imports map[string]string
DefaultImports []string
ExtraSchemas GenSchemaList
Authorized bool
Security []GenSecurityRequirements
SecurityDefinitions GenSecuritySchemes
Principal string
SuccessResponse *GenResponse
SuccessResponses []GenResponse
Responses GenStatusCodeResponses
DefaultResponse *GenResponse
Params GenParameters
QueryParams GenParameters
PathParams GenParameters
HeaderParams GenParameters
FormParams GenParameters
HasQueryParams bool
HasPathParams bool
HasHeaderParams bool
HasFormParams bool
HasFormValueParams bool
HasFileParams bool
HasBodyParams bool
HasStreamingResponse bool
Schemes []string
ExtraSchemes []string
ProducesMediaTypes []string
ConsumesMediaTypes []string
TimeoutName string
Extensions map[string]interface{}
}
// GenOperations represents a list of operations to generate
// this implements a sort by operation id
type GenOperations []GenOperation
func (g GenOperations) Len() int { return len(g) }
func (g GenOperations) Less(i, j int) bool { return g[i].Name < g[j].Name }
func (g GenOperations) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
// GenApp represents all the meta data needed to generate an application
// from a swagger spec
type GenApp struct {
GenCommon
APIPackage string
Package string
ReceiverName string
Name string
Principal string
DefaultConsumes string
DefaultProduces string
Host string
BasePath string
Info *spec.Info
ExternalDocs *spec.ExternalDocumentation
Imports map[string]string
DefaultImports []string
Schemes []string
ExtraSchemes []string
Consumes GenSerGroups
Produces GenSerGroups
SecurityDefinitions GenSecuritySchemes
Models []GenDefinition
Operations GenOperations
OperationGroups GenOperationGroups
SwaggerJSON string
// Embedded specs: this is important for when the generated server adds routes.
// NOTE: there is a distinct advantage to having this in runtime rather than generated code.
// We are not ever going to generate the router.
// If embedding spec is an issue (e.g. memory usage), this can be excluded with the --exclude-spec
// generation option. Alternative methods to serve spec (e.g. from disk, ...) may be implemented by
// adding a middleware to the generated API.
FlatSwaggerJSON string
ExcludeSpec bool
GenOpts *GenOpts
}
// UseGoStructFlags returns true when no strategy is specified or it is set to "go-flags"
func (g *GenApp) UseGoStructFlags() bool {
if g.GenOpts == nil {
return true
}
return g.GenOpts.FlagStrategy == "" || g.GenOpts.FlagStrategy == "go-flags"
}
// UsePFlags returns true when the flag strategy is set to pflag
func (g *GenApp) UsePFlags() bool {
return g.GenOpts != nil && strings.HasPrefix(g.GenOpts.FlagStrategy, "pflag")
}
// UseIntermediateMode for https://wiki.mozilla.org/Security/Server_Side_TLS#Intermediate_compatibility_.28default.29
func (g *GenApp) UseIntermediateMode() bool {
return g.GenOpts != nil && g.GenOpts.CompatibilityMode == "intermediate"
}
// UseModernMode for https://wiki.mozilla.org/Security/Server_Side_TLS#Modern_compatibility
func (g *GenApp) UseModernMode() bool {
return g.GenOpts == nil || g.GenOpts.CompatibilityMode == "" || g.GenOpts.CompatibilityMode == "modern"
}
// GenSerGroups sorted representation of serializer groups
type GenSerGroups []GenSerGroup
func (g GenSerGroups) Len() int { return len(g) }
func (g GenSerGroups) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
func (g GenSerGroups) Less(i, j int) bool { return g[i].MediaType < g[j].MediaType }
// GenSerGroup represents a group of serializers, most likely this is a media type to a list of
// prioritized serializers.
type GenSerGroup struct {
ReceiverName string
AppName string
Name string
MediaType string
Implementation string
AllSerializers GenSerializers
}
// GenSerializers sorted representation of serializers
type GenSerializers []GenSerializer
func (g GenSerializers) Len() int { return len(g) }
func (g GenSerializers) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
func (g GenSerializers) Less(i, j int) bool { return g[i].MediaType < g[j].MediaType }
// GenSerializer represents a single serializer for a particular media type
type GenSerializer struct {
ReceiverName string
AppName string
Name string
MediaType string
Implementation string
}
// GenSecurityScheme represents a security scheme for code generation
type GenSecurityScheme struct {
AppName string
ID string
Name string
ReceiverName string
IsBasicAuth bool
IsAPIKeyAuth bool
IsOAuth2 bool
Scopes []string
Source string
Principal string
// from spec.SecurityScheme
Description string
Type string
In string
Flow string
AuthorizationURL string
TokenURL string
Extensions map[string]interface{}
}
// GenSecuritySchemes sorted representation of security schemes
type GenSecuritySchemes []GenSecurityScheme
func (g GenSecuritySchemes) Len() int { return len(g) }
func (g GenSecuritySchemes) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
func (g GenSecuritySchemes) Less(i, j int) bool { return g[i].ID < g[j].ID }
// GenSecurityRequirement represents a security requirement for an operation
type GenSecurityRequirement struct {
Name string
Scopes []string
}
// GenSecurityRequirements represents a compounded security requirement specification.
// In a []GenSecurityRequirements complete requirements specification,
// outer elements are interpreted as optional requirements (OR), and
// inner elements are interpreted as jointly required (AND).
type GenSecurityRequirements []GenSecurityRequirement
func (g GenSecurityRequirements) Len() int { return len(g) }
func (g GenSecurityRequirements) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
func (g GenSecurityRequirements) Less(i, j int) bool { return g[i].Name < g[j].Name }
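// A hypothetical compounded requirement, read with the OR/AND interpretation above:
// the caller may satisfy either the api_key scheme alone, or oauth2 with both scopes.
//
//   []GenSecurityRequirements{
//       {{Name: "api_key"}},
//       {{Name: "oauth2", Scopes: []string{"read", "write"}}},
//   }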

View file

@ -0,0 +1,835 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package generator
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"log"
"os"
"path"
"path/filepath"
"regexp"
goruntime "runtime"
"sort"
"strings"
yaml "gopkg.in/yaml.v2"
"github.com/go-openapi/analysis"
"github.com/go-openapi/loads"
"github.com/go-openapi/runtime"
"github.com/go-openapi/spec"
"github.com/go-openapi/swag"
)
// GenerateServer generates a server application
func GenerateServer(name string, modelNames, operationIDs []string, opts *GenOpts) error {
generator, err := newAppGenerator(name, modelNames, operationIDs, opts)
if err != nil {
return err
}
return generator.Generate()
}
// GenerateSupport generates the supporting files for an API
func GenerateSupport(name string, modelNames, operationIDs []string, opts *GenOpts) error {
generator, err := newAppGenerator(name, modelNames, operationIDs, opts)
if err != nil {
return err
}
return generator.GenerateSupport(nil)
}
func newAppGenerator(name string, modelNames, operationIDs []string, opts *GenOpts) (*appGenerator, error) {
if opts == nil {
return nil, errors.New("gen opts are required")
}
if err := opts.CheckOpts(); err != nil {
return nil, err
}
templates.LoadDefaults()
if opts.Template != "" {
if err := templates.LoadContrib(opts.Template); err != nil {
return nil, err
}
}
if opts.TemplateDir != "" {
if err := templates.LoadDir(opts.TemplateDir); err != nil {
return nil, err
}
}
// Load the spec
var err error
var specDoc *loads.Document
opts.Spec, err = findSwaggerSpec(opts.Spec)
if err != nil {
return nil, err
}
if !filepath.IsAbs(opts.Spec) {
cwd, _ := os.Getwd()
opts.Spec = filepath.Join(cwd, opts.Spec)
}
if opts.PropertiesSpecOrder {
opts.Spec = withAutoXOrder(opts.Spec)
}
opts.Spec, specDoc, err = loadSpec(opts.Spec)
if err != nil {
return nil, err
}
specDoc, err = validateAndFlattenSpec(opts, specDoc)
if err != nil {
return nil, err
}
analyzed := analysis.New(specDoc.Spec())
models, err := gatherModels(specDoc, modelNames)
if err != nil {
return nil, err
}
operations := gatherOperations(analyzed, operationIDs)
if len(operations) == 0 {
return nil, errors.New("no operations were selected")
}
defaultScheme := opts.DefaultScheme
if defaultScheme == "" {
defaultScheme = "http"
}
defaultProduces := opts.DefaultProduces
if defaultProduces == "" {
defaultProduces = runtime.JSONMime
}
defaultConsumes := opts.DefaultConsumes
if defaultConsumes == "" {
defaultConsumes = runtime.JSONMime
}
opts.Name = appNameOrDefault(specDoc, name, "swagger")
apiPackage := opts.LanguageOpts.ManglePackagePath(opts.APIPackage, "api")
return &appGenerator{
Name: opts.Name,
Receiver: "o",
SpecDoc: specDoc,
Analyzed: analyzed,
Models: models,
Operations: operations,
Target: opts.Target,
DumpData: opts.DumpData,
Package: opts.LanguageOpts.ManglePackageName(apiPackage, "api"),
APIPackage: apiPackage,
ModelsPackage: opts.LanguageOpts.ManglePackagePath(opts.ModelPackage, "definitions"),
ServerPackage: opts.LanguageOpts.ManglePackagePath(opts.ServerPackage, "server"),
ClientPackage: opts.LanguageOpts.ManglePackagePath(opts.ClientPackage, "client"),
OperationsPackage: filepath.Join(opts.LanguageOpts.ManglePackagePath(opts.ServerPackage, "server"), apiPackage),
Principal: opts.Principal,
DefaultScheme: defaultScheme,
DefaultProduces: defaultProduces,
DefaultConsumes: defaultConsumes,
GenOpts: opts,
}, nil
}
type appGenerator struct {
Name string
Receiver string
SpecDoc *loads.Document
Analyzed *analysis.Spec
Package string
APIPackage string
ModelsPackage string
ServerPackage string
ClientPackage string
OperationsPackage string
Principal string
Models map[string]spec.Schema
Operations map[string]opRef
Target string
DumpData bool
DefaultScheme string
DefaultProduces string
DefaultConsumes string
GenOpts *GenOpts
}
func withAutoXOrder(specPath string) string {
lookFor := func(ele interface{}, key string) (yaml.MapSlice, bool) {
if slice, ok := ele.(yaml.MapSlice); ok {
for _, v := range slice {
if v.Key == key {
if slice, ok := v.Value.(yaml.MapSlice); ok {
return slice, ok
}
}
}
}
return nil, false
}
var addXOrder func(interface{})
addXOrder = func(element interface{}) {
if props, ok := lookFor(element, "properties"); ok {
for i, prop := range props {
if pSlice, ok := prop.Value.(yaml.MapSlice); ok {
isObject := false
xOrderIndex := -1 //Find if x-order already exists
for i, v := range pSlice {
if v.Key == "type" && v.Value == object {
isObject = true
}
if v.Key == xOrder {
xOrderIndex = i
break
}
}
if xOrderIndex > -1 { //Override existing x-order
pSlice[xOrderIndex] = yaml.MapItem{Key: xOrder, Value: i}
} else { // append new x-order
pSlice = append(pSlice, yaml.MapItem{Key: xOrder, Value: i})
}
prop.Value = pSlice
props[i] = prop
if isObject {
addXOrder(pSlice)
}
}
}
}
}
yamlDoc, err := swag.YAMLData(specPath)
if err != nil {
panic(err)
}
if defs, ok := lookFor(yamlDoc, "definitions"); ok {
for _, def := range defs {
addXOrder(def.Value)
}
}
addXOrder(yamlDoc)
out, err := yaml.Marshal(yamlDoc)
if err != nil {
panic(err)
}
tmpFile, err := ioutil.TempFile("", filepath.Base(specPath))
if err != nil {
panic(err)
}
if err := ioutil.WriteFile(tmpFile.Name(), out, 0); err != nil {
panic(err)
}
return tmpFile.Name()
}
// 1. Checks if the child path and parent path coincide.
// 2. If they do, returns the child path relative to the parent path.
// 3. Otherwise, returns false.
func checkPrefixAndFetchRelativePath(childpath string, parentpath string) (bool, string) {
// Windows (local) file systems - NTFS, as well as FAT and variants
// are case insensitive.
cp, pp := childpath, parentpath
if goruntime.GOOS == "windows" {
cp = strings.ToLower(cp)
pp = strings.ToLower(pp)
}
if strings.HasPrefix(cp, pp) {
pth, err := filepath.Rel(parentpath, childpath)
if err != nil {
log.Fatalln(err)
}
return true, pth
}
return false, ""
}
func (a *appGenerator) Generate() error {
app, err := a.makeCodegenApp()
if err != nil {
return err
}
if a.DumpData {
bb, err := json.MarshalIndent(app, "", " ")
if err != nil {
return err
}
fmt.Fprintln(os.Stdout, string(bb))
return nil
}
// NOTE: relative to the previous implementation with channels (IPC),
// concurrent execution was removed because of the FuncMap that is being shared;
// templates are now lazy loaded, so there is concurrent map access that cannot be guarded.
if a.GenOpts.IncludeModel {
log.Printf("rendering %d models", len(app.Models))
for _, mod := range app.Models {
modCopy := mod
modCopy.IncludeValidator = true // a.GenOpts.IncludeValidator
modCopy.IncludeModel = true
if err := a.GenOpts.renderDefinition(&modCopy); err != nil {
return err
}
}
}
if a.GenOpts.IncludeHandler {
log.Printf("rendering %d operation groups (tags)", app.OperationGroups.Len())
for _, opg := range app.OperationGroups {
opgCopy := opg
log.Printf("rendering %d operations for %s", opg.Operations.Len(), opg.Name)
for _, op := range opgCopy.Operations {
opCopy := op
if err := a.GenOpts.renderOperation(&opCopy); err != nil {
return err
}
}
// Optional OperationGroups templates generation
opGroup := opg
opGroup.DefaultImports = app.DefaultImports
if err := a.GenOpts.renderOperationGroup(&opGroup); err != nil {
return fmt.Errorf("error while rendering operation group: %v", err)
}
}
}
if a.GenOpts.IncludeSupport {
log.Printf("rendering support")
if err := a.GenerateSupport(&app); err != nil {
return err
}
}
return nil
}
func (a *appGenerator) GenerateSupport(ap *GenApp) error {
app := ap
if ap == nil {
ca, err := a.makeCodegenApp()
if err != nil {
return err
}
app = &ca
}
baseImport := a.GenOpts.LanguageOpts.baseImport(a.Target)
importPath := path.Join(filepath.ToSlash(baseImport), a.GenOpts.LanguageOpts.ManglePackagePath(a.OperationsPackage, ""))
app.DefaultImports = append(
app.DefaultImports,
path.Join(filepath.ToSlash(baseImport), a.GenOpts.LanguageOpts.ManglePackagePath(a.ServerPackage, "")),
importPath,
)
return a.GenOpts.renderApplication(app)
}
var mediaTypeNames = map[*regexp.Regexp]string{
regexp.MustCompile("application/.*json"): "json",
regexp.MustCompile("application/.*yaml"): "yaml",
regexp.MustCompile("application/.*protobuf"): "protobuf",
regexp.MustCompile("application/.*capnproto"): "capnproto",
regexp.MustCompile("application/.*thrift"): "thrift",
regexp.MustCompile("(?:application|text)/.*xml"): "xml",
regexp.MustCompile("text/.*markdown"): "markdown",
regexp.MustCompile("text/.*html"): "html",
regexp.MustCompile("text/.*csv"): "csv",
regexp.MustCompile("text/.*tsv"): "tsv",
regexp.MustCompile("text/.*javascript"): "js",
regexp.MustCompile("text/.*css"): "css",
regexp.MustCompile("text/.*plain"): "txt",
regexp.MustCompile("application/.*octet-stream"): "bin",
regexp.MustCompile("application/.*tar"): "tar",
regexp.MustCompile("application/.*gzip"): "gzip",
regexp.MustCompile("application/.*gz"): "gzip",
regexp.MustCompile("application/.*raw-stream"): "bin",
regexp.MustCompile("application/x-www-form-urlencoded"): "urlform",
regexp.MustCompile("multipart/form-data"): "multipartform",
}
var knownProducers = map[string]string{
"json": "runtime.JSONProducer()",
"yaml": "yamlpc.YAMLProducer()",
"xml": "runtime.XMLProducer()",
"txt": "runtime.TextProducer()",
"bin": "runtime.ByteStreamProducer()",
"urlform": "runtime.DiscardProducer",
"multipartform": "runtime.DiscardProducer",
}
var knownConsumers = map[string]string{
"json": "runtime.JSONConsumer()",
"yaml": "yamlpc.YAMLConsumer()",
"xml": "runtime.XMLConsumer()",
"txt": "runtime.TextConsumer()",
"bin": "runtime.ByteStreamConsumer()",
"urlform": "runtime.DiscardConsumer",
"multipartform": "runtime.DiscardConsumer",
}
func getSerializer(sers []GenSerGroup, ext string) (*GenSerGroup, bool) {
for i := range sers {
s := &sers[i]
if s.Name == ext {
return s, true
}
}
return nil, false
}
func mediaTypeName(tn string) (string, bool) {
for k, v := range mediaTypeNames {
if k.MatchString(tn) {
return v, true
}
}
return "", false
}
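// Illustrative sketch (not part of the vendored source): media types are reduced to the short
// serializer names used to look up known producers and consumers, e.g.:
//
//	name, ok := mediaTypeName("application/vnd.api+json") // "json", true
//	name, ok = mediaTypeName("text/x-markdown")           // "markdown", true
//	name, ok = mediaTypeName("application/unknown")       // "", false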
func (a *appGenerator) makeConsumes() (consumes GenSerGroups, consumesJSON bool) {
reqCons := a.Analyzed.RequiredConsumes()
sort.Strings(reqCons)
for _, cons := range reqCons {
cn, ok := mediaTypeName(cons)
if !ok {
nm := swag.ToJSONName(cons)
ser := GenSerializer{
AppName: a.Name,
ReceiverName: a.Receiver,
Name: nm,
MediaType: cons,
Implementation: "",
}
consumes = append(consumes, GenSerGroup{
AppName: ser.AppName,
ReceiverName: ser.ReceiverName,
Name: ser.Name,
MediaType: cons,
AllSerializers: []GenSerializer{ser},
Implementation: ser.Implementation,
})
continue
}
nm := swag.ToJSONName(cn)
if nm == "json" {
consumesJSON = true
}
if ser, ok := getSerializer(consumes, cn); ok {
ser.AllSerializers = append(ser.AllSerializers, GenSerializer{
AppName: ser.AppName,
ReceiverName: ser.ReceiverName,
Name: ser.Name,
MediaType: cons,
Implementation: knownConsumers[nm],
})
sort.Sort(ser.AllSerializers)
continue
}
ser := GenSerializer{
AppName: a.Name,
ReceiverName: a.Receiver,
Name: nm,
MediaType: cons,
Implementation: knownConsumers[nm],
}
consumes = append(consumes, GenSerGroup{
AppName: ser.AppName,
ReceiverName: ser.ReceiverName,
Name: ser.Name,
MediaType: cons,
AllSerializers: []GenSerializer{ser},
Implementation: ser.Implementation,
})
}
if len(consumes) == 0 {
consumes = append(consumes, GenSerGroup{
AppName: a.Name,
ReceiverName: a.Receiver,
Name: "json",
MediaType: runtime.JSONMime,
AllSerializers: []GenSerializer{{
AppName: a.Name,
ReceiverName: a.Receiver,
Name: "json",
MediaType: runtime.JSONMime,
Implementation: knownConsumers["json"],
}},
Implementation: knownConsumers["json"],
})
consumesJSON = true
}
sort.Sort(consumes)
return
}
func (a *appGenerator) makeProduces() (produces GenSerGroups, producesJSON bool) {
reqProds := a.Analyzed.RequiredProduces()
sort.Strings(reqProds)
for _, prod := range reqProds {
pn, ok := mediaTypeName(prod)
if !ok {
nm := swag.ToJSONName(prod)
ser := GenSerializer{
AppName: a.Name,
ReceiverName: a.Receiver,
Name: nm,
MediaType: prod,
Implementation: "",
}
produces = append(produces, GenSerGroup{
AppName: ser.AppName,
ReceiverName: ser.ReceiverName,
Name: ser.Name,
MediaType: prod,
Implementation: ser.Implementation,
AllSerializers: []GenSerializer{ser},
})
continue
}
nm := swag.ToJSONName(pn)
if nm == "json" {
producesJSON = true
}
if ser, ok := getSerializer(produces, pn); ok {
ser.AllSerializers = append(ser.AllSerializers, GenSerializer{
AppName: ser.AppName,
ReceiverName: ser.ReceiverName,
Name: ser.Name,
MediaType: prod,
Implementation: knownProducers[nm],
})
sort.Sort(ser.AllSerializers)
continue
}
ser := GenSerializer{
AppName: a.Name,
ReceiverName: a.Receiver,
Name: nm,
MediaType: prod,
Implementation: knownProducers[nm],
}
produces = append(produces, GenSerGroup{
AppName: ser.AppName,
ReceiverName: ser.ReceiverName,
Name: ser.Name,
MediaType: prod,
Implementation: ser.Implementation,
AllSerializers: []GenSerializer{ser},
})
}
if len(produces) == 0 {
produces = append(produces, GenSerGroup{
AppName: a.Name,
ReceiverName: a.Receiver,
Name: "json",
MediaType: runtime.JSONMime,
AllSerializers: []GenSerializer{{
AppName: a.Name,
ReceiverName: a.Receiver,
Name: "json",
MediaType: runtime.JSONMime,
Implementation: knownProducers["json"],
}},
Implementation: knownProducers["json"],
})
producesJSON = true
}
sort.Sort(produces)
return
}
func (a *appGenerator) makeSecuritySchemes() GenSecuritySchemes {
if a.Principal == "" {
a.Principal = "interface{}"
}
requiredSecuritySchemes := make(map[string]spec.SecurityScheme, len(a.Analyzed.RequiredSecuritySchemes()))
for _, scheme := range a.Analyzed.RequiredSecuritySchemes() {
if req, ok := a.SpecDoc.Spec().SecurityDefinitions[scheme]; ok && req != nil {
requiredSecuritySchemes[scheme] = *req
}
}
return gatherSecuritySchemes(requiredSecuritySchemes, a.Name, a.Principal, a.Receiver)
}
func (a *appGenerator) makeCodegenApp() (GenApp, error) {
log.Println("building a plan for generation")
sw := a.SpecDoc.Spec()
receiver := a.Receiver
var defaultImports []string
jsonb, _ := json.MarshalIndent(a.SpecDoc.OrigSpec(), "", " ")
flatjsonb, _ := json.MarshalIndent(a.SpecDoc.Spec(), "", " ")
consumes, _ := a.makeConsumes()
produces, _ := a.makeProduces()
sort.Sort(consumes)
sort.Sort(produces)
security := a.makeSecuritySchemes()
baseImport := a.GenOpts.LanguageOpts.baseImport(a.Target)
var imports = make(map[string]string)
var genMods GenDefinitions
importPath := a.GenOpts.ExistingModels
if a.GenOpts.ExistingModels == "" {
imports[a.GenOpts.LanguageOpts.ManglePackageName(a.ModelsPackage, "models")] = path.Join(
filepath.ToSlash(baseImport),
a.GenOpts.LanguageOpts.ManglePackagePath(a.GenOpts.ModelPackage, "models"))
}
if importPath != "" {
defaultImports = append(defaultImports, importPath)
}
log.Println("planning definitions")
for mn, m := range a.Models {
mod, err := makeGenDefinition(
mn,
a.ModelsPackage,
m,
a.SpecDoc,
a.GenOpts,
)
if err != nil {
return GenApp{}, fmt.Errorf("error in model %s while planning definitions: %v", mn, err)
}
if mod != nil {
if !mod.External {
genMods = append(genMods, *mod)
}
// Copy model imports to operation imports
for alias, pkg := range mod.Imports {
target := a.GenOpts.LanguageOpts.ManglePackageName(alias, "")
imports[target] = pkg
}
}
}
sort.Sort(genMods)
log.Println("planning operations")
tns := make(map[string]struct{})
var genOps GenOperations
for on, opp := range a.Operations {
o := opp.Op
o.Tags = pruneEmpty(o.Tags)
o.ID = on
var bldr codeGenOpBuilder
bldr.ModelsPackage = a.ModelsPackage
bldr.Principal = a.Principal
bldr.Target = a.Target
bldr.DefaultImports = defaultImports
bldr.Imports = imports
bldr.DefaultScheme = a.DefaultScheme
bldr.Doc = a.SpecDoc
bldr.Analyzed = a.Analyzed
bldr.BasePath = a.SpecDoc.BasePath()
bldr.GenOpts = a.GenOpts
// TODO: change operation name to something safe
bldr.Name = on
bldr.Operation = *o
bldr.Method = opp.Method
bldr.Path = opp.Path
bldr.Authed = len(a.Analyzed.SecurityRequirementsFor(o)) > 0
bldr.Security = a.Analyzed.SecurityRequirementsFor(o)
bldr.SecurityDefinitions = a.Analyzed.SecurityDefinitionsFor(o)
bldr.RootAPIPackage = a.GenOpts.LanguageOpts.ManglePackageName(a.ServerPackage, "server")
bldr.IncludeValidator = true
bldr.APIPackage = a.APIPackage
st := o.Tags
if a.GenOpts != nil {
st = a.GenOpts.Tags
}
intersected := intersectTags(o.Tags, st)
if len(st) > 0 && len(intersected) == 0 {
continue
}
if len(intersected) > 0 {
tag := intersected[0]
bldr.APIPackage = a.GenOpts.LanguageOpts.ManglePackagePath(tag, a.APIPackage)
for _, t := range intersected {
tns[t] = struct{}{}
}
}
op, err := bldr.MakeOperation()
if err != nil {
return GenApp{}, err
}
op.ReceiverName = receiver
op.Tags = intersected
genOps = append(genOps, op)
}
for k := range tns {
importPath := filepath.ToSlash(
path.Join(
filepath.ToSlash(baseImport),
a.GenOpts.LanguageOpts.ManglePackagePath(a.OperationsPackage, ""),
swag.ToFileName(k)))
defaultImports = append(defaultImports, importPath)
}
sort.Sort(genOps)
log.Println("grouping operations into packages")
opsGroupedByPackage := make(map[string]GenOperations)
for _, operation := range genOps {
if operation.Package == "" {
operation.Package = a.Package
}
opsGroupedByPackage[operation.Package] = append(opsGroupedByPackage[operation.Package], operation)
}
var opGroups GenOperationGroups
for k, v := range opsGroupedByPackage {
sort.Sort(v)
// trim duplicate extra schemas within the same package
vv := make(GenOperations, 0, len(v))
seenExtraSchema := make(map[string]bool)
for _, op := range v {
uniqueExtraSchemas := make(GenSchemaList, 0, len(op.ExtraSchemas))
for _, xs := range op.ExtraSchemas {
if _, alreadyThere := seenExtraSchema[xs.Name]; !alreadyThere {
seenExtraSchema[xs.Name] = true
uniqueExtraSchemas = append(uniqueExtraSchemas, xs)
}
}
op.ExtraSchemas = uniqueExtraSchemas
vv = append(vv, op)
}
opGroup := GenOperationGroup{
GenCommon: GenCommon{
Copyright: a.GenOpts.Copyright,
TargetImportPath: filepath.ToSlash(baseImport),
},
Name: k,
Operations: vv,
DefaultImports: defaultImports,
Imports: imports,
RootPackage: a.APIPackage,
GenOpts: a.GenOpts,
}
opGroups = append(opGroups, opGroup)
var importPath string
if k == a.APIPackage {
importPath = path.Join(filepath.ToSlash(baseImport), a.GenOpts.LanguageOpts.ManglePackagePath(a.OperationsPackage, ""))
} else {
importPath = path.Join(filepath.ToSlash(baseImport), a.GenOpts.LanguageOpts.ManglePackagePath(a.OperationsPackage, ""), k)
}
defaultImports = append(defaultImports, importPath)
}
sort.Sort(opGroups)
log.Println("planning meta data and facades")
var collectedSchemes []string
var extraSchemes []string
for _, op := range genOps {
collectedSchemes = concatUnique(collectedSchemes, op.Schemes)
extraSchemes = concatUnique(extraSchemes, op.ExtraSchemes)
}
sort.Strings(collectedSchemes)
sort.Strings(extraSchemes)
host := "localhost"
if sw.Host != "" {
host = sw.Host
}
basePath := "/"
if sw.BasePath != "" {
basePath = sw.BasePath
}
return GenApp{
GenCommon: GenCommon{
Copyright: a.GenOpts.Copyright,
TargetImportPath: filepath.ToSlash(baseImport),
},
APIPackage: a.GenOpts.LanguageOpts.ManglePackageName(a.ServerPackage, "server"),
Package: a.Package,
ReceiverName: receiver,
Name: a.Name,
Host: host,
BasePath: basePath,
Schemes: schemeOrDefault(collectedSchemes, a.DefaultScheme),
ExtraSchemes: extraSchemes,
ExternalDocs: sw.ExternalDocs,
Info: sw.Info,
Consumes: consumes,
Produces: produces,
DefaultConsumes: a.DefaultConsumes,
DefaultProduces: a.DefaultProduces,
DefaultImports: defaultImports,
Imports: imports,
SecurityDefinitions: security,
Models: genMods,
Operations: genOps,
OperationGroups: opGroups,
Principal: a.Principal,
SwaggerJSON: generateReadableSpec(jsonb),
FlatSwaggerJSON: generateReadableSpec(flatjsonb),
ExcludeSpec: a.GenOpts != nil && a.GenOpts.ExcludeSpec,
GenOpts: a.GenOpts,
}, nil
}
// generateReadableSpec renders the swagger JSON spec as a string instead of bytes.
// The only character that needs to be escaped is the '`' symbol, since it cannot be escaped inside a Go
// string quoted as `string data`. The function does not care about the beginning or the end of the
// string it escapes, since all data that needs to be escaped is always in the middle of the swagger spec.
func generateReadableSpec(spec []byte) string {
buf := &bytes.Buffer{}
for _, b := range string(spec) {
if b == '`' {
buf.WriteString("`+\"`\"+`")
} else {
buf.WriteRune(b)
}
}
return buf.String()
}
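// Illustrative sketch (not part of the vendored source): each '`' in the spec is replaced by
// the sequence `+"`"+`, so that the whole spec can be embedded in a raw (backquoted) string
// literal in the generated code (e.g. via the swagger_json_embed template), e.g.:
//
//	generateReadableSpec([]byte("a `word`"))
//	// returns: a `+"`"+`word`+"`"+`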


@ -0,0 +1,511 @@
package generator
import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"os"
"path"
"path/filepath"
"strings"
"text/template"
"text/template/parse"
"log"
"github.com/go-openapi/inflect"
"github.com/go-openapi/swag"
"github.com/kr/pretty"
)
var templates *Repository
// FuncMap is a map with default functions for use in the templates.
// These are available in every template.
var FuncMap template.FuncMap = map[string]interface{}{
"pascalize": pascalize,
"camelize": swag.ToJSONName,
"varname": golang.MangleVarName,
"humanize": swag.ToHumanNameLower,
"snakize": golang.MangleFileName,
"toPackagePath": func(name string) string {
return filepath.FromSlash(golang.ManglePackagePath(name, ""))
},
"toPackage": func(name string) string {
return golang.ManglePackagePath(name, "")
},
"toPackageName": func(name string) string {
return golang.ManglePackageName(name, "")
},
"dasherize": swag.ToCommandName,
"pluralizeFirstWord": func(arg string) string {
sentence := strings.Split(arg, " ")
if len(sentence) == 1 {
return inflect.Pluralize(arg)
}
return inflect.Pluralize(sentence[0]) + " " + strings.Join(sentence[1:], " ")
},
"json": asJSON,
"prettyjson": asPrettyJSON,
"hasInsecure": func(arg []string) bool {
return swag.ContainsStringsCI(arg, "http") || swag.ContainsStringsCI(arg, "ws")
},
"hasSecure": func(arg []string) bool {
return swag.ContainsStringsCI(arg, "https") || swag.ContainsStringsCI(arg, "wss")
},
// TODO: simplify redundant functions
"stripPackage": func(str, pkg string) string {
parts := strings.Split(str, ".")
strlen := len(parts)
if strlen > 0 {
return parts[strlen-1]
}
return str
},
"dropPackage": func(str string) string {
parts := strings.Split(str, ".")
strlen := len(parts)
if strlen > 0 {
return parts[strlen-1]
}
return str
},
"upper": strings.ToUpper,
"contains": func(coll []string, arg string) bool {
for _, v := range coll {
if v == arg {
return true
}
}
return false
},
"padSurround": func(entry, padWith string, i, ln int) string {
var res []string
if i > 0 {
for j := 0; j < i; j++ {
res = append(res, padWith)
}
}
res = append(res, entry)
tot := ln - i - 1
for j := 0; j < tot; j++ {
res = append(res, padWith)
}
return strings.Join(res, ",")
},
"joinFilePath": filepath.Join,
"comment": func(str string) string {
lines := strings.Split(str, "\n")
return (strings.Join(lines, "\n// "))
},
"blockcomment": func(str string) string {
return strings.Replace(str, "*/", "[*]/", -1)
},
"inspect": pretty.Sprint,
"cleanPath": path.Clean,
"mediaTypeName": func(orig string) string {
return strings.SplitN(orig, ";", 2)[0]
},
"goSliceInitializer": goSliceInitializer,
"hasPrefix": strings.HasPrefix,
"stringContains": strings.Contains,
}
func init() {
templates = NewRepository(FuncMap)
}
var assets = map[string][]byte{
"validation/primitive.gotmpl": MustAsset("templates/validation/primitive.gotmpl"),
"validation/customformat.gotmpl": MustAsset("templates/validation/customformat.gotmpl"),
"docstring.gotmpl": MustAsset("templates/docstring.gotmpl"),
"validation/structfield.gotmpl": MustAsset("templates/validation/structfield.gotmpl"),
"modelvalidator.gotmpl": MustAsset("templates/modelvalidator.gotmpl"),
"structfield.gotmpl": MustAsset("templates/structfield.gotmpl"),
"tupleserializer.gotmpl": MustAsset("templates/tupleserializer.gotmpl"),
"additionalpropertiesserializer.gotmpl": MustAsset("templates/additionalpropertiesserializer.gotmpl"),
"schematype.gotmpl": MustAsset("templates/schematype.gotmpl"),
"schemabody.gotmpl": MustAsset("templates/schemabody.gotmpl"),
"schema.gotmpl": MustAsset("templates/schema.gotmpl"),
"schemavalidator.gotmpl": MustAsset("templates/schemavalidator.gotmpl"),
"model.gotmpl": MustAsset("templates/model.gotmpl"),
"header.gotmpl": MustAsset("templates/header.gotmpl"),
"swagger_json_embed.gotmpl": MustAsset("templates/swagger_json_embed.gotmpl"),
"server/parameter.gotmpl": MustAsset("templates/server/parameter.gotmpl"),
"server/urlbuilder.gotmpl": MustAsset("templates/server/urlbuilder.gotmpl"),
"server/responses.gotmpl": MustAsset("templates/server/responses.gotmpl"),
"server/operation.gotmpl": MustAsset("templates/server/operation.gotmpl"),
"server/builder.gotmpl": MustAsset("templates/server/builder.gotmpl"),
"server/server.gotmpl": MustAsset("templates/server/server.gotmpl"),
"server/configureapi.gotmpl": MustAsset("templates/server/configureapi.gotmpl"),
"server/main.gotmpl": MustAsset("templates/server/main.gotmpl"),
"server/doc.gotmpl": MustAsset("templates/server/doc.gotmpl"),
"client/parameter.gotmpl": MustAsset("templates/client/parameter.gotmpl"),
"client/response.gotmpl": MustAsset("templates/client/response.gotmpl"),
"client/client.gotmpl": MustAsset("templates/client/client.gotmpl"),
"client/facade.gotmpl": MustAsset("templates/client/facade.gotmpl"),
}
var protectedTemplates = map[string]bool{
"schemabody": true,
"privtuplefield": true,
"withoutBaseTypeBody": true,
"swaggerJsonEmbed": true,
"validationCustomformat": true,
"tuplefield": true,
"header": true,
"withBaseTypeBody": true,
"primitivefieldvalidator": true,
"mapvalidator": true,
"propertyValidationDocString": true,
"typeSchemaType": true,
"docstring": true,
"dereffedSchemaType": true,
"model": true,
"modelvalidator": true,
"privstructfield": true,
"schemavalidator": true,
"tuplefieldIface": true,
"tupleSerializer": true,
"tupleserializer": true,
"schemaSerializer": true,
"propertyvalidator": true,
"structfieldIface": true,
"schemaBody": true,
"objectvalidator": true,
"schematype": true,
"additionalpropertiesserializer": true,
"slicevalidator": true,
"validationStructfield": true,
"validationPrimitive": true,
"schemaType": true,
"subTypeBody": true,
"schema": true,
"additionalPropertiesSerializer": true,
"serverDoc": true,
"structfield": true,
"hasDiscriminatedSerializer": true,
"discriminatedSerializer": true,
}
// AddFile adds a file to the default repository. It will create a new template based on the filename.
// It trims the .gotmpl suffix from the end and converts the name using swag.ToJSONName. This strips
// directory separators and camel-cases the next letter.
// e.g. validation/primitive.gotmpl becomes validationPrimitive
//
// If the file contains a definition for a template that is protected, the whole file will not be added.
func AddFile(name, data string) error {
return templates.addFile(name, data, false)
}
func asJSON(data interface{}) (string, error) {
b, err := json.Marshal(data)
if err != nil {
return "", err
}
return string(b), nil
}
func asPrettyJSON(data interface{}) (string, error) {
b, err := json.MarshalIndent(data, "", " ")
if err != nil {
return "", err
}
return string(b), nil
}
func goSliceInitializer(data interface{}) (string, error) {
// goSliceInitializer constructs a Go literal initializer from interface{} literals.
// e.g. []interface{}{"a", "b"} is transformed into {"a","b",}
// e.g. map[string]interface{}{ "a": "x", "b": "y"} is transformed into {"a":"x","b":"y",}.
//
// NOTE: this is currently used to construct simple slice initializers for default values.
// This allows for nicer slice initializers for slices of primitive types and avoids systematic use of json.Unmarshal().
b, err := json.Marshal(data)
if err != nil {
return "", err
}
return strings.Replace(strings.Replace(strings.Replace(string(b), "}", ",}", -1), "[", "{", -1), "]", ",}", -1), nil
}
// NewRepository creates a new template repository with the provided functions defined
func NewRepository(funcs template.FuncMap) *Repository {
repo := Repository{
files: make(map[string]string),
templates: make(map[string]*template.Template),
funcs: funcs,
}
if repo.funcs == nil {
repo.funcs = make(template.FuncMap)
}
return &repo
}
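// Usage sketch (illustrative only, not part of the vendored source): a repository is typically
// created with the default FuncMap, seeded with the embedded templates, and then queried by
// the mangled template name:
//
//	repo := NewRepository(FuncMap)
//	repo.LoadDefaults()
//	tmpl := repo.MustGet("model") // resolves "model" and all the templates it depends on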
// Repository is the repository for the generator templates
type Repository struct {
files map[string]string
templates map[string]*template.Template
funcs template.FuncMap
}
// LoadDefaults will load the embedded templates
func (t *Repository) LoadDefaults() {
for name, asset := range assets {
if err := t.addFile(name, string(asset), true); err != nil {
log.Fatal(err)
}
}
}
// LoadDir will walk the specified path and add each .gotmpl file it finds to the repository
func (t *Repository) LoadDir(templatePath string) error {
err := filepath.Walk(templatePath, func(path string, info os.FileInfo, err error) error {
if strings.HasSuffix(path, ".gotmpl") {
if assetName, e := filepath.Rel(templatePath, path); e == nil {
if data, e := ioutil.ReadFile(path); e == nil {
if ee := t.AddFile(assetName, string(data)); ee != nil {
// Fatality is decided by caller
// log.Fatal(ee)
return fmt.Errorf("could not add template: %v", ee)
}
}
// Non-readable files are skipped
}
}
if err != nil {
return err
}
// Non-template files are skipped
return nil
})
if err != nil {
return fmt.Errorf("could not complete template processing in directory \"%s\": %v", templatePath, err)
}
return nil
}
// LoadContrib loads templates from the contrib directory
func (t *Repository) LoadContrib(name string) error {
log.Printf("loading contrib %s", name)
const pathPrefix = "templates/contrib/"
basePath := pathPrefix + name
filesAdded := 0
for _, aname := range AssetNames() {
if !strings.HasSuffix(aname, ".gotmpl") {
continue
}
if strings.HasPrefix(aname, basePath) {
target := aname[len(basePath)+1:]
err := t.addFile(target, string(MustAsset(aname)), true)
if err != nil {
return err
}
log.Printf("added contributed template %s from %s", target, aname)
filesAdded++
}
}
if filesAdded == 0 {
return fmt.Errorf("no files added from template: %s", name)
}
return nil
}
func (t *Repository) addFile(name, data string, allowOverride bool) error {
fileName := name
name = swag.ToJSONName(strings.TrimSuffix(name, ".gotmpl"))
templ, err := template.New(name).Funcs(t.funcs).Parse(data)
if err != nil {
return fmt.Errorf("failed to load template %s: %v", name, err)
}
// check if any protected templates are defined
if !allowOverride {
for _, template := range templ.Templates() {
if protectedTemplates[template.Name()] {
return fmt.Errorf("cannot overwrite protected template %s", template.Name())
}
}
}
// Add each defined template into the cache
for _, template := range templ.Templates() {
t.files[template.Name()] = fileName
t.templates[template.Name()] = template.Lookup(template.Name())
}
return nil
}
// MustGet a template by name, panics when fails
func (t *Repository) MustGet(name string) *template.Template {
tpl, err := t.Get(name)
if err != nil {
panic(err)
}
return tpl
}
// AddFile adds a file to the repository. It will create a new template based on the filename.
// It trims the .gotmpl suffix from the end and converts the name using swag.ToJSONName. This strips
// directory separators and camel-cases the next letter.
// e.g. validation/primitive.gotmpl becomes validationPrimitive
//
// If the file contains a definition for a template that is protected, the whole file will not be added.
func (t *Repository) AddFile(name, data string) error {
return t.addFile(name, data, false)
}
func findDependencies(n parse.Node) []string {
var deps []string
depMap := make(map[string]bool)
if n == nil {
return deps
}
switch node := n.(type) {
case *parse.ListNode:
if node != nil && node.Nodes != nil {
for _, nn := range node.Nodes {
for _, dep := range findDependencies(nn) {
depMap[dep] = true
}
}
}
case *parse.IfNode:
for _, dep := range findDependencies(node.BranchNode.List) {
depMap[dep] = true
}
for _, dep := range findDependencies(node.BranchNode.ElseList) {
depMap[dep] = true
}
case *parse.RangeNode:
for _, dep := range findDependencies(node.BranchNode.List) {
depMap[dep] = true
}
for _, dep := range findDependencies(node.BranchNode.ElseList) {
depMap[dep] = true
}
case *parse.WithNode:
for _, dep := range findDependencies(node.BranchNode.List) {
depMap[dep] = true
}
for _, dep := range findDependencies(node.BranchNode.ElseList) {
depMap[dep] = true
}
case *parse.TemplateNode:
depMap[node.Name] = true
}
for dep := range depMap {
deps = append(deps, dep)
}
return deps
}
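// Illustrative sketch (not part of the vendored source): given a parsed template such as
//
//	tmpl := template.Must(template.New("schema").Parse(`{{ template "schemavalidator" . }}`))
//
// findDependencies(tmpl.Tree.Root) would report []string{"schemavalidator"}, which is how
// flattenDependencies discovers the templates that still need to be added to the parse tree.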
func (t *Repository) flattenDependencies(templ *template.Template, dependencies map[string]bool) map[string]bool {
if dependencies == nil {
dependencies = make(map[string]bool)
}
deps := findDependencies(templ.Tree.Root)
for _, d := range deps {
if _, found := dependencies[d]; !found {
dependencies[d] = true
if tt := t.templates[d]; tt != nil {
dependencies = t.flattenDependencies(tt, dependencies)
}
}
dependencies[d] = true
}
return dependencies
}
func (t *Repository) addDependencies(templ *template.Template) (*template.Template, error) {
name := templ.Name()
deps := t.flattenDependencies(templ, nil)
for dep := range deps {
if dep == "" {
continue
}
tt := templ.Lookup(dep)
// Check if we have it
if tt == nil {
tt = t.templates[dep]
// Still don't have it, return an error
if tt == nil {
return templ, fmt.Errorf("could not find template %s", dep)
}
var err error
// Add it to the parse tree
templ, err = templ.AddParseTree(dep, tt.Tree)
if err != nil {
return templ, fmt.Errorf("dependency error: %v", err)
}
}
}
return templ.Lookup(name), nil
}
// Get will return the named template from the repository, ensuring that all dependent templates are loaded.
// It will return an error if a dependent template is not defined in the repository.
func (t *Repository) Get(name string) (*template.Template, error) {
templ, found := t.templates[name]
if !found {
return templ, fmt.Errorf("template doesn't exist %s", name)
}
return t.addDependencies(templ)
}
// DumpTemplates prints out a dump of all the defined templates, where they are defined and what their dependencies are.
func (t *Repository) DumpTemplates() {
buf := bytes.NewBuffer(nil)
fmt.Fprintln(buf, "\n# Templates")
for name, templ := range t.templates {
fmt.Fprintf(buf, "## %s\n", name)
fmt.Fprintf(buf, "Defined in `%s`\n", t.files[name])
if deps := findDependencies(templ.Tree.Root); len(deps) > 0 {
fmt.Fprintf(buf, "####requires \n - %v\n\n\n", strings.Join(deps, "\n - "))
}
fmt.Fprintln(buf, "\n---")
}
log.Println(buf.String())
}


@ -0,0 +1,801 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package generator
import (
"fmt"
"log"
"path"
"path/filepath"
"strings"
"github.com/go-openapi/loads"
"github.com/go-openapi/spec"
"github.com/go-openapi/swag"
"github.com/kr/pretty"
)
const (
iface = "interface{}"
array = "array"
file = "file"
number = "number"
integer = "integer"
boolean = "boolean"
str = "string"
object = "object"
binary = "binary"
sHTTP = "http"
body = "body"
)
// Extensions supported by go-swagger
const (
xClass = "x-class" // class name used by discriminator
xGoCustomTag = "x-go-custom-tag" // additional tag for serializers on struct fields
xGoName = "x-go-name" // name of the generated go variable
xGoType = "x-go-type" // reuse existing type (do not generate)
xIsNullable = "x-isnullable"
xNullable = "x-nullable" // turns the schema into a pointer
xOmitEmpty = "x-omitempty"
xSchemes = "x-schemes" // additional schemes supported for operations (server generation)
xOrder = "x-order" // sort order for properties (or any schema)
)
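// Illustrative sketch (not part of the vendored source, hypothetical property): these extensions
// are read from the spec definitions, e.g. a property declared as
//
//	shipDate:
//	  type: string
//	  format: date-time
//	  x-go-name: ShippedAt
//	  x-nullable: true
//	  x-order: 2
//
// would be generated as a pointer field named ShippedAt, with properties sorted according to x-order.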
// swaggerTypeName contains a mapping from go type to swagger type or format
var swaggerTypeName map[string]string
func init() {
swaggerTypeName = make(map[string]string)
for k, v := range typeMapping {
swaggerTypeName[v] = k
}
}
func simpleResolvedType(tn, fmt string, items *spec.Items) (result resolvedType) {
result.SwaggerType = tn
result.SwaggerFormat = fmt
if tn == file {
// special case of swagger type "file", rendered as io.ReadCloser interface
result.IsPrimitive = true
result.GoType = formatMapping[str][binary]
result.IsStream = true
return
}
if fmt != "" {
fmtn := strings.Replace(fmt, "-", "", -1)
if fmm, ok := formatMapping[tn]; ok {
if tpe, ok := fmm[fmtn]; ok {
result.GoType = tpe
result.IsPrimitive = true
_, result.IsCustomFormatter = customFormatters[tpe]
// special case of swagger format "binary", rendered as io.ReadCloser interface
// TODO(fredbi): should set IsCustomFormatter=false when binary
result.IsStream = fmt == binary
return
}
}
}
if tpe, ok := typeMapping[tn]; ok {
result.GoType = tpe
_, result.IsPrimitive = primitives[tpe]
result.IsPrimitive = ok
return
}
if tn == array {
result.IsArray = true
result.IsPrimitive = false
result.IsCustomFormatter = false
result.IsNullable = false
if items == nil {
result.GoType = "[]" + iface
return
}
res := simpleResolvedType(items.Type, items.Format, items.Items)
result.GoType = "[]" + res.GoType
return
}
result.GoType = tn
_, result.IsPrimitive = primitives[tn]
return
}
func typeForHeader(header spec.Header) resolvedType {
return simpleResolvedType(header.Type, header.Format, header.Items)
}
func newTypeResolver(pkg string, doc *loads.Document) *typeResolver {
resolver := typeResolver{ModelsPackage: pkg, Doc: doc}
resolver.KnownDefs = make(map[string]struct{}, len(doc.Spec().Definitions))
for k, sch := range doc.Spec().Definitions {
tpe, _, _ := knownDefGoType(k, sch, nil)
resolver.KnownDefs[tpe] = struct{}{}
}
return &resolver
}
// knownDefGoType returns go type, package and package alias for definition
func knownDefGoType(def string, schema spec.Schema, clear func(string) string) (string, string, string) {
debugLog("known def type: %q", def)
ext := schema.Extensions
if nm, ok := ext.GetString(xGoName); ok {
if clear == nil {
debugLog("known def type %s no clear: %q", xGoName, nm)
return nm, "", ""
}
debugLog("known def type %s clear: %q -> %q", xGoName, nm, clear(nm))
return clear(nm), "", ""
}
v, ok := ext[xGoType]
if !ok {
if clear == nil {
debugLog("known def type no clear: %q", def)
return def, "", ""
}
debugLog("known def type clear: %q -> %q", def, clear(def))
return clear(def), "", ""
}
xt := v.(map[string]interface{})
t := xt["type"].(string)
impIface, ok := xt["import"]
if !ok {
return t, "", ""
}
imp := impIface.(map[string]interface{})
pkg := imp["package"].(string)
al, ok := imp["alias"]
var alias string
if ok {
alias = al.(string)
} else {
alias = path.Base(pkg)
}
debugLog("known def type %s no clear: %q: pkg=%s, alias=%s", xGoType, alias+"."+t, pkg, alias)
return alias + "." + t, pkg, alias
}
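// Illustrative sketch (not part of the vendored source): with an x-go-type extension such as
//
//	definitions:
//	  timestamp:
//	    type: string
//	    x-go-type:
//	      type: Time
//	      import:
//	        package: time
//
// knownDefGoType("timestamp", schema, nil) would return ("time.Time", "time", "time"),
// i.e. the existing type is reused instead of generating a new model.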
type typeResolver struct {
Doc *loads.Document
ModelsPackage string
ModelName string
KnownDefs map[string]struct{}
// unexported fields
keepDefinitionsPkg string
knownDefsKept map[string]struct{}
}
// NewWithModelName clones a type resolver and specifies a new model name
func (t *typeResolver) NewWithModelName(name string) *typeResolver {
tt := newTypeResolver(t.ModelsPackage, t.Doc)
tt.ModelName = name
// propagates kept definitions
tt.keepDefinitionsPkg = t.keepDefinitionsPkg
tt.knownDefsKept = t.knownDefsKept
return tt
}
// withKeepDefinitionsPackage instructs the type resolver to keep previously resolved package name for
// definitions known at the moment it is first called.
func (t *typeResolver) withKeepDefinitionsPackage(definitionsPackage string) *typeResolver {
t.keepDefinitionsPkg = definitionsPackage
t.knownDefsKept = make(map[string]struct{}, len(t.KnownDefs))
for k := range t.KnownDefs {
t.knownDefsKept[k] = struct{}{}
}
return t
}
// IsNullable hints the generator whether to render the type with a pointer or not.
//
// A schema is deemed nullable (i.e. rendered by a pointer) when:
// - a custom extension says it has to be so
// - it is an object with properties
// - it is a composed object (allOf)
//
// The interpretation of Required as a means to make a type nullable is carried out elsewhere.
func (t *typeResolver) IsNullable(schema *spec.Schema) bool {
nullable := t.isNullable(schema)
return nullable || len(schema.AllOf) > 0
}
func (t *typeResolver) resolveSchemaRef(schema *spec.Schema, isRequired bool) (returns bool, result resolvedType, err error) {
if schema.Ref.String() != "" {
debugLog("resolving ref (anon: %t, req: %t) %s", false, isRequired, schema.Ref.String())
returns = true
var ref *spec.Schema
var er error
ref, er = spec.ResolveRef(t.Doc.Spec(), &schema.Ref)
if er != nil {
debugLog("error resolving ref %s: %v", schema.Ref.String(), er)
err = er
return
}
res, er := t.ResolveSchema(ref, false, isRequired)
if er != nil {
err = er
return
}
result = res
tn := filepath.Base(schema.Ref.GetURL().Fragment)
tpe, pkg, alias := knownDefGoType(tn, *ref, t.goTypeName)
debugLog("type name %s, package %s, alias %s", tpe, pkg, alias)
if tpe != "" {
result.GoType = tpe
result.Pkg = pkg
result.PkgAlias = alias
}
result.HasDiscriminator = res.HasDiscriminator
result.IsBaseType = result.HasDiscriminator
result.IsNullable = t.IsNullable(ref)
//result.IsAliased = true
return
}
return
}
func (t *typeResolver) inferAliasing(result *resolvedType, schema *spec.Schema, isAnonymous bool, isRequired bool) {
if !isAnonymous && t.ModelName != "" {
result.AliasedType = result.GoType
result.IsAliased = true
result.GoType = t.goTypeName(t.ModelName)
}
}
func (t *typeResolver) resolveFormat(schema *spec.Schema, isAnonymous bool, isRequired bool) (returns bool, result resolvedType, err error) {
if schema.Format != "" {
// defaults to string
result.SwaggerType = str
if len(schema.Type) > 0 {
result.SwaggerType = schema.Type[0]
}
debugLog("resolving format (anon: %t, req: %t)", isAnonymous, isRequired)
schFmt := strings.Replace(schema.Format, "-", "", -1)
if fmm, ok := formatMapping[result.SwaggerType]; ok {
if tpe, ok := fmm[schFmt]; ok {
returns = true
result.GoType = tpe
_, result.IsCustomFormatter = customFormatters[tpe]
}
}
if tpe, ok := typeMapping[schFmt]; !returns && ok {
returns = true
result.GoType = tpe
_, result.IsCustomFormatter = customFormatters[tpe]
}
result.SwaggerFormat = schema.Format
t.inferAliasing(&result, schema, isAnonymous, isRequired)
// special case of swagger format "binary", rendered as io.ReadCloser interface and is therefore not a primitive type
// TODO: should set IsCustomFormatter=false in this case.
result.IsPrimitive = schFmt != binary
result.IsStream = schFmt == binary
// propagate extensions in resolvedType
result.Extensions = schema.Extensions
switch result.SwaggerType {
case str:
result.IsNullable = nullableStrfmt(schema, isRequired)
case number, integer:
result.IsNullable = nullableNumber(schema, isRequired)
default:
result.IsNullable = t.IsNullable(schema)
}
}
return
}
func (t *typeResolver) isNullable(schema *spec.Schema) bool {
check := func(extension string) (bool, bool) {
v, found := schema.Extensions[extension]
nullable, cast := v.(bool)
return nullable, found && cast
}
if nullable, ok := check(xIsNullable); ok {
return nullable
}
if nullable, ok := check(xNullable); ok {
return nullable
}
return len(schema.Properties) > 0
}
func setIsEmptyOmitted(result *resolvedType, schema *spec.Schema, tpe string) {
defaultValue := true
if tpe == array {
defaultValue = false
}
v, found := schema.Extensions[xOmitEmpty]
if !found {
result.IsEmptyOmitted = defaultValue
return
}
omitted, cast := v.(bool)
result.IsEmptyOmitted = omitted && cast
}
func (t *typeResolver) firstType(schema *spec.Schema) string {
if len(schema.Type) == 0 || schema.Type[0] == "" {
return object
}
if len(schema.Type) > 1 {
// JSON-Schema multiple types, e.g. {"type": [ "object", "array" ]} are not supported.
// TODO: should keep the first _supported_ type, e.g. skip null
log.Printf("warning: JSON-Schema type definition as array with several types is not supported in %#v. Taking the first type: %s", schema.Type, schema.Type[0])
}
return schema.Type[0]
}
func (t *typeResolver) resolveArray(schema *spec.Schema, isAnonymous, isRequired bool) (result resolvedType, err error) {
debugLog("resolving array (anon: %t, req: %t)", isAnonymous, isRequired)
result.IsArray = true
result.IsNullable = false
if schema.AdditionalItems != nil {
result.HasAdditionalItems = (schema.AdditionalItems.Allows || schema.AdditionalItems.Schema != nil)
}
if schema.Items == nil {
result.GoType = "[]" + iface
result.SwaggerType = array
result.SwaggerFormat = ""
t.inferAliasing(&result, schema, isAnonymous, isRequired)
return
}
if len(schema.Items.Schemas) > 0 {
result.IsArray = false
result.IsTuple = true
result.SwaggerType = array
result.SwaggerFormat = ""
t.inferAliasing(&result, schema, isAnonymous, isRequired)
return
}
rt, er := t.ResolveSchema(schema.Items.Schema, true, false)
if er != nil {
err = er
return
}
// override the general nullability rule from ResolveSchema():
// only complex items are nullable (when not discriminated, not forced by x-nullable)
rt.IsNullable = t.IsNullable(schema.Items.Schema) && !rt.HasDiscriminator
result.GoType = "[]" + rt.GoType
if rt.IsNullable && !strings.HasPrefix(rt.GoType, "*") {
result.GoType = "[]*" + rt.GoType
}
result.ElemType = &rt
result.SwaggerType = array
result.SwaggerFormat = ""
t.inferAliasing(&result, schema, isAnonymous, isRequired)
result.Extensions = schema.Extensions
return
}
func (t *typeResolver) goTypeName(nm string) string {
if len(t.knownDefsKept) > 0 {
// if a definitions package has been defined, already resolved definitions are
// always resolved against their original package (e.g. "models"), and not the
// current package.
// This allows complex anonymous extra schemas to reuse known definitions generated in another package.
if _, ok := t.knownDefsKept[nm]; ok {
return strings.Join([]string{t.keepDefinitionsPkg, swag.ToGoName(nm)}, ".")
}
}
if t.ModelsPackage == "" {
return swag.ToGoName(nm)
}
if _, ok := t.KnownDefs[nm]; ok {
return strings.Join([]string{t.ModelsPackage, swag.ToGoName(nm)}, ".")
}
return swag.ToGoName(nm)
}
func (t *typeResolver) resolveObject(schema *spec.Schema, isAnonymous bool) (result resolvedType, err error) {
debugLog("resolving object %s (anon: %t, req: %t)", t.ModelName, isAnonymous, false)
result.IsAnonymous = isAnonymous
result.IsBaseType = schema.Discriminator != ""
if !isAnonymous {
result.SwaggerType = object
tpe, pkg, alias := knownDefGoType(t.ModelName, *schema, t.goTypeName)
result.GoType = tpe
result.Pkg = pkg
result.PkgAlias = alias
}
if len(schema.AllOf) > 0 {
result.GoType = t.goTypeName(t.ModelName)
result.IsComplexObject = true
var isNullable bool
for _, p := range schema.AllOf {
if t.IsNullable(&p) {
isNullable = true
}
}
result.IsNullable = isNullable
result.SwaggerType = object
return
}
// if this schema has properties, build a map of property name to
// resolved type, this should also flag the object as anonymous,
// when a ref is found, the anonymous flag will be reset
if len(schema.Properties) > 0 {
result.IsNullable = t.IsNullable(schema)
result.IsComplexObject = true
// no return here, still need to check for additional properties
}
// account for additional properties
if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
sch := schema.AdditionalProperties.Schema
et, er := t.ResolveSchema(sch, sch.Ref.String() == "", false)
if er != nil {
err = er
return
}
result.IsMap = !result.IsComplexObject
result.SwaggerType = object
// only complex map elements are nullable (when not forced by x-nullable)
// TODO: figure out if required to check when not discriminated like arrays?
et.IsNullable = t.isNullable(schema.AdditionalProperties.Schema)
if et.IsNullable {
result.GoType = "map[string]*" + et.GoType
} else {
result.GoType = "map[string]" + et.GoType
}
// Resolving nullability conflicts for:
// - map[][]...[]{items}
// - map[]{aliased type}
//
// when IsMap is true and the type is a distinct definition,
// aliased type or anonymous construct generated independently.
//
// IsMapNullOverride is to be handled by the generator for special cases
// where the map element is considered non-nullable while the element itself is.
//
// This allows nullability to be assessed according to the context
needsOverride := result.IsMap && (et.IsArray || (sch.Ref.String() != "" || et.IsAliased || et.IsAnonymous))
if needsOverride {
var er error
if et.IsArray {
var it resolvedType
s := sch
// resolve the last items after nested arrays
for s.Items != nil && s.Items.Schema != nil {
it, er = t.ResolveSchema(s.Items.Schema, sch.Ref.String() == "", false)
if er != nil {
return
}
s = s.Items.Schema
}
// mark an override when nullable status conflicts, i.e. when the original type is not already nullable
if !it.IsAnonymous || it.IsAnonymous && it.IsNullable {
result.IsMapNullOverride = true
}
} else {
// this locks the generator on the local nullability status
result.IsMapNullOverride = true
}
}
t.inferAliasing(&result, schema, isAnonymous, false)
result.ElemType = &et
return
}
if len(schema.Properties) > 0 {
return
}
// an object without properties and without an AdditionalProperties schema is rendered as interface{}
result.GoType = iface
result.IsMap = true
result.SwaggerType = object
result.IsNullable = false
result.IsInterface = len(schema.Properties) == 0
return
}
// nullableBool makes a boolean a pointer when we want to distinguish the zero value from no value set.
// This is the case when:
// - a x-nullable extension says so in the spec
// - it is **not** a read-only property
// - it is a required property
// - it has a default value
func nullableBool(schema *spec.Schema, isRequired bool) bool {
if nullable := nullableExtension(schema.Extensions); nullable != nil {
return *nullable
}
required := isRequired && schema.Default == nil && !schema.ReadOnly
optional := !isRequired && (schema.Default != nil || schema.ReadOnly)
return required || optional
}
// nullableNumber makes a number a pointer when we want to distinguish the zero value from no value set.
// This is the case when:
// - a x-nullable extension says so in the spec
// - it is **not** a read-only property
// - it is a required property
// - boundaries define the zero value as a valid value:
// - there is a non-exclusive boundary set at the zero value of the type
// - the [min,max] range crosses the zero value of the type
func nullableNumber(schema *spec.Schema, isRequired bool) bool {
if nullable := nullableExtension(schema.Extensions); nullable != nil {
return *nullable
}
hasDefault := schema.Default != nil && !swag.IsZero(schema.Default)
isMin := schema.Minimum != nil && (*schema.Minimum != 0 || schema.ExclusiveMinimum)
bcMin := schema.Minimum != nil && *schema.Minimum == 0 && !schema.ExclusiveMinimum
isMax := schema.Minimum == nil && (schema.Maximum != nil && (*schema.Maximum != 0 || schema.ExclusiveMaximum))
bcMax := schema.Maximum != nil && *schema.Maximum == 0 && !schema.ExclusiveMaximum
isMinMax := (schema.Minimum != nil && schema.Maximum != nil && *schema.Minimum < *schema.Maximum)
bcMinMax := (schema.Minimum != nil && schema.Maximum != nil && (*schema.Minimum < 0 && 0 < *schema.Maximum))
nullable := !schema.ReadOnly && (isRequired || (hasDefault && !(isMin || isMax || isMinMax)) || bcMin || bcMax || bcMinMax)
return nullable
}
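// Illustrative sketch (not part of the vendored source, hypothetical property): for a schema such as
//
//	count:
//	  type: integer
//	  minimum: 0
//
// the boundary at zero is non-exclusive, so bcMin is true and the property is rendered as a
// pointer (e.g. *int64), allowing the generated code to distinguish "0" from "not set".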
// nullableString makes a string nullable when we want to distinguish the zero value from no value set.
// This is the case when:
// - a x-nullable extension says so in the spec
// - it is **not** a read-only property
// - it is a required property
// - it has a MinLength property set to 0
// - it has a default other than "" (the zero for strings) and no MinLength or zero MinLength
func nullableString(schema *spec.Schema, isRequired bool) bool {
if nullable := nullableExtension(schema.Extensions); nullable != nil {
return *nullable
}
hasDefault := schema.Default != nil && !swag.IsZero(schema.Default)
isMin := schema.MinLength != nil && *schema.MinLength != 0
bcMin := schema.MinLength != nil && *schema.MinLength == 0
nullable := !schema.ReadOnly && (isRequired || (hasDefault && !isMin) || bcMin)
return nullable
}
func nullableStrfmt(schema *spec.Schema, isRequired bool) bool {
notBinary := schema.Format != binary
if nullable := nullableExtension(schema.Extensions); nullable != nil && notBinary {
return *nullable
}
hasDefault := schema.Default != nil && !swag.IsZero(schema.Default)
nullable := !schema.ReadOnly && (isRequired || hasDefault)
return notBinary && nullable
}
func nullableExtension(ext spec.Extensions) *bool {
if ext == nil {
return nil
}
if boolPtr := boolExtension(ext, xNullable); boolPtr != nil {
return boolPtr
}
return boolExtension(ext, xIsNullable)
}
func boolExtension(ext spec.Extensions, key string) *bool {
if v, ok := ext[key]; ok {
if bb, ok := v.(bool); ok {
return &bb
}
}
return nil
}
func (t *typeResolver) ResolveSchema(schema *spec.Schema, isAnonymous, isRequired bool) (result resolvedType, err error) {
debugLog("resolving schema (anon: %t, req: %t) %s", isAnonymous, isRequired, t.ModelName)
if schema == nil {
result.IsInterface = true
result.GoType = iface
return
}
tpe := t.firstType(schema)
defer setIsEmptyOmitted(&result, schema, tpe)
var returns bool
returns, result, err = t.resolveSchemaRef(schema, isRequired)
if returns {
if !isAnonymous {
result.IsMap = false
result.IsComplexObject = true
debugLog("not anonymous ref")
}
debugLog("returning after ref")
return
}
// special case of swagger type "file", rendered as io.ReadCloser interface
if t.firstType(schema) == file {
result.SwaggerType = file
result.IsPrimitive = true
result.IsNullable = false
result.GoType = formatMapping[str][binary]
result.IsStream = true
return
}
returns, result, err = t.resolveFormat(schema, isAnonymous, isRequired)
if returns {
debugLog("returning after resolve format: %s", pretty.Sprint(result))
return
}
result.IsNullable = t.isNullable(schema) || isRequired
switch tpe {
case array:
result, err = t.resolveArray(schema, isAnonymous, false)
return
case file, number, integer, boolean:
result.Extensions = schema.Extensions
result.GoType = typeMapping[tpe]
result.SwaggerType = tpe
t.inferAliasing(&result, schema, isAnonymous, isRequired)
switch tpe {
case boolean:
result.IsPrimitive = true
result.IsCustomFormatter = false
result.IsNullable = nullableBool(schema, isRequired)
case number, integer:
result.IsPrimitive = true
result.IsCustomFormatter = false
result.IsNullable = nullableNumber(schema, isRequired)
case file:
}
return
case str:
result.GoType = str
result.SwaggerType = str
t.inferAliasing(&result, schema, isAnonymous, isRequired)
result.IsPrimitive = true
result.IsNullable = nullableString(schema, isRequired)
result.Extensions = schema.Extensions
case object:
result, err = t.resolveObject(schema, isAnonymous)
if err != nil {
return resolvedType{}, err
}
result.HasDiscriminator = schema.Discriminator != ""
return
case "null":
result.GoType = iface
result.SwaggerType = object
result.IsNullable = false
result.IsInterface = true
return
default:
err = fmt.Errorf("unresolvable: %v (format %q)", schema.Type, schema.Format)
return
}
return result, err
}
// resolvedType is a swagger type that has been resolved and analyzed for usage
// in a template
type resolvedType struct {
IsAnonymous bool
IsArray bool
IsMap bool
IsInterface bool
IsPrimitive bool
IsCustomFormatter bool
IsAliased bool
IsNullable bool
IsStream bool
IsEmptyOmitted bool
// A tuple gets rendered as an anonymous struct with P{index} as property name
IsTuple bool
HasAdditionalItems bool
// A complex object gets rendered as a struct
IsComplexObject bool
// A polymorphic type
IsBaseType bool
HasDiscriminator bool
GoType string
Pkg string
PkgAlias string
AliasedType string
SwaggerType string
SwaggerFormat string
Extensions spec.Extensions
// The type of the element in a slice or map
ElemType *resolvedType
// IsMapNullOverride indicates that a nullable object is used within an
// aliased map. In this case, the reference is not rendered with a pointer
IsMapNullOverride bool
// IsSuperAlias indicates that the aliased type is really the same type,
// e.g. in golang, this translates to: type A = B
IsSuperAlias bool
}
func (rt *resolvedType) Zero() string {
// if type is aliased, provide zero from the aliased type
if rt.IsAliased {
if zr, ok := zeroes[rt.AliasedType]; ok {
return rt.GoType + "(" + zr + ")"
}
}
// zero function provided as native or by strfmt function
if zr, ok := zeroes[rt.GoType]; ok {
return zr
}
// map and slice initializer
if rt.IsMap {
return "make(" + rt.GoType + ", 50)"
} else if rt.IsArray {
return "make(" + rt.GoType + ", 0, 50)"
}
// object initializer
if rt.IsTuple || rt.IsComplexObject {
if rt.IsNullable {
return "new(" + rt.GoType + ")"
}
return rt.GoType + "{}"
}
// interface initializer
if rt.IsInterface {
return "nil"
}
return ""
}


@ -0,0 +1,165 @@
// +build !go1.11
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scan
import (
"fmt"
"go/ast"
"log"
"regexp"
"golang.org/x/tools/go/loader"
)
type packageFilter struct {
Name string
}
func (pf *packageFilter) Matches(path string) bool {
matched, err := regexp.MatchString(pf.Name, path)
if err != nil {
log.Fatal(err)
}
return matched
}
type packageFilters []packageFilter
func (pf packageFilters) HasFilters() bool {
return len(pf) > 0
}
func (pf packageFilters) Matches(path string) bool {
for _, mod := range pf {
if mod.Matches(path) {
return true
}
}
return false
}
type classifiedProgram struct {
Meta []*ast.File
Models []*ast.File
Routes []*ast.File
Operations []*ast.File
Parameters []*ast.File
Responses []*ast.File
}
// programClassifier classifies the files of a program into buckets
// for processing by a swagger spec generator. It sorts files into
// the groups of classifiedProgram: Meta, Models, Routes, Operations, Parameters and Responses.
//
// Each of these buckets is then processed with an appropriate parsing strategy.
//
// When Include or Exclude filters are provided, they are used to limit the
// candidates prior to parsing.
// The include filters take precedence over the excludes, so when something appears
// in both filters it will be included.
type programClassifier struct {
Includes packageFilters
Excludes packageFilters
}
func (pc *programClassifier) Classify(prog *loader.Program) (*classifiedProgram, error) {
var cp classifiedProgram
for pkg, pkgInfo := range prog.AllPackages {
if Debug {
log.Printf("analyzing: %s\n", pkg.Path())
}
if pc.Includes.HasFilters() {
if !pc.Includes.Matches(pkg.Path()) {
continue
}
} else if pc.Excludes.HasFilters() {
if pc.Excludes.Matches(pkg.Path()) {
continue
}
}
for _, file := range pkgInfo.Files {
var ro, op, mt, pm, rs, mm bool // only add a particular file once
for _, comments := range file.Comments {
var seenStruct string
for _, cline := range comments.List {
if cline != nil {
matches := rxSwaggerAnnotation.FindStringSubmatch(cline.Text)
if len(matches) > 1 {
switch matches[1] {
case "route":
if !ro {
cp.Routes = append(cp.Routes, file)
ro = true
}
case "operation":
if !op {
cp.Operations = append(cp.Operations, file)
op = true
}
case "model":
if !mm {
cp.Models = append(cp.Models, file)
mm = true
}
if seenStruct == "" || seenStruct == matches[1] {
seenStruct = matches[1]
} else {
return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q", seenStruct, matches[1])
}
case "meta":
if !mt {
cp.Meta = append(cp.Meta, file)
mt = true
}
case "parameters":
if !pm {
cp.Parameters = append(cp.Parameters, file)
pm = true
}
if seenStruct == "" || seenStruct == matches[1] {
seenStruct = matches[1]
} else {
return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q", seenStruct, matches[1])
}
case "response":
if !rs {
cp.Responses = append(cp.Responses, file)
rs = true
}
if seenStruct == "" || seenStruct == matches[1] {
seenStruct = matches[1]
} else {
return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q", seenStruct, matches[1])
}
case "strfmt", "name", "discriminated", "file", "enum", "default", "alias", "type":
// TODO: perhaps collect these and pass along to avoid lookups later on
case "allOf":
case "ignore":
default:
return nil, fmt.Errorf("classifier: unknown swagger annotation %q", matches[1])
}
}
}
}
}
}
}
return &cp, nil
}

85
vendor/github.com/go-swagger/go-swagger/scan/doc.go generated vendored Normal file

@ -0,0 +1,85 @@
// +build !go1.11
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*Package scan provides a scanner for go files that produces a swagger spec document.
You give it a main file and it will parse all the files that are required by that main
package to produce a swagger specification.
To use it, you can add a go:generate comment to your main file, for example:
//go:generate swagger generate spec
The following annotations exist:
swagger:meta
The swagger:meta annotation flags a file as source for metadata about the API.
This is typically a doc.go file with your package documentation.
You can specify a Consumes and Produces key which has a new content type on each line
Schemes is a tag that is required and allows for a comma separated string composed of:
http, https, ws or wss
Host and BasePath can be specified but those values will be defaults,
they should get substituted when serving the swagger spec.
Default parameters and responses are not supported at this stage, for those you can edit the template json.
swagger:strfmt [name]
A swagger:strfmt annotation names a type as a string formatter. The name is mandatory and that is
what will be used as format name for this particular string format.
String formats should only be used for very well known formats.
swagger:model [?model name]
A swagger:model annotation optionally gets a model name as extra data on the line.
When this appears anywhere in a comment for a struct, that struct becomes a schema
in the definitions object of swagger.
The struct gets analyzed and all the collected models are added to the tree.
The refs are tracked separately so that they can be renamed later on.
When this annotation is found to be on an interface instead of a struct, the properties are provided
through exported nullary methods.
A property of an interface model can have a Discriminator: true annotation to mark that field as
the field that will contain the discriminator value.
swagger:route [method] [path pattern] [operation id] [?tag1 tag2 tag3]
A swagger:route annotation links a path to a method.
This operation gets a unique id, which is used in various places as method name.
One such usage is in method names for client generation for example.
Because there are many routers available, this tool does not try to parse the paths
you provided to your routing library of choice. So you have to specify your path pattern
yourself in valid swagger syntax.
swagger:params [operationid1 operationid2]
Links a struct to one or more operations. The params in the resulting swagger spec can be composed of several structs.
There are no guarantees given on how property name overlaps are resolved when several structs apply to the same operation.
This tag works very similarly to the swagger:model tag except that it produces valid parameter objects instead of schema
objects.
swagger:response [?response name]
Reads a struct decorated with swagger:response and uses that information to fill up the headers and the schema for a response.
A swagger:route can specify a response name for a status code and then the matching response will be used for that operation in the swagger definition.
*/
package scan
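
For illustration only (not part of the vendored file; the type and field names are invented), a minimal, hypothetical example of the swagger:strfmt and swagger:model annotations described above, in the comment style this scanner consumes:

// SSN is a US social security number.
//
// swagger:strfmt ssn
type SSN string

// Pet represents a pet in the store.
//
// swagger:model pet
type Pet struct {
	// the name of the pet
	//
	// required: true
	Name string `json:"name"`

	// the owner's social security number
	SSN SSN `json:"ssn"`
}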

245
vendor/github.com/go-swagger/go-swagger/scan/meta.go generated vendored Normal file
View file

@ -0,0 +1,245 @@
// +build !go1.11
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scan
import (
"encoding/json"
"fmt"
"net/mail"
"regexp"
"strings"
"github.com/go-openapi/spec"
)
func metaTOSSetter(meta *spec.Info) func([]string) {
return func(lines []string) {
meta.TermsOfService = joinDropLast(lines)
}
}
func metaConsumesSetter(meta *spec.Swagger) func([]string) {
return func(consumes []string) { meta.Consumes = consumes }
}
func metaProducesSetter(meta *spec.Swagger) func([]string) {
return func(produces []string) { meta.Produces = produces }
}
func metaSchemeSetter(meta *spec.Swagger) func([]string) {
return func(schemes []string) { meta.Schemes = schemes }
}
func metaSecuritySetter(meta *spec.Swagger) func([]map[string][]string) {
return func(secDefs []map[string][]string) { meta.Security = secDefs }
}
func metaSecurityDefinitionsSetter(meta *spec.Swagger) func(json.RawMessage) error {
return func(jsonValue json.RawMessage) error {
var jsonData spec.SecurityDefinitions
err := json.Unmarshal(jsonValue, &jsonData)
if err != nil {
return err
}
meta.SecurityDefinitions = jsonData
return nil
}
}
func metaVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
return func(jsonValue json.RawMessage) error {
var jsonData spec.Extensions
err := json.Unmarshal(jsonValue, &jsonData)
if err != nil {
return err
}
for k := range jsonData {
if !rxAllowedExtensions.MatchString(k) {
return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
}
}
meta.Extensions = jsonData
return nil
}
}
func infoVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
return func(jsonValue json.RawMessage) error {
var jsonData spec.Extensions
err := json.Unmarshal(jsonValue, &jsonData)
if err != nil {
return err
}
for k := range jsonData {
if !rxAllowedExtensions.MatchString(k) {
return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
}
}
meta.Info.Extensions = jsonData
return nil
}
}
func newMetaParser(swspec *spec.Swagger) *sectionedParser {
sp := new(sectionedParser)
if swspec.Info == nil {
swspec.Info = new(spec.Info)
}
info := swspec.Info
sp.setTitle = func(lines []string) {
tosave := joinDropLast(lines)
if len(tosave) > 0 {
tosave = rxStripTitleComments.ReplaceAllString(tosave, "")
}
info.Title = tosave
}
sp.setDescription = func(lines []string) { info.Description = joinDropLast(lines) }
sp.taggers = []tagParser{
newMultiLineTagParser("TOS", newMultilineDropEmptyParser(rxTOS, metaTOSSetter(info)), false),
newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, metaConsumesSetter(swspec)), false),
newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, metaProducesSetter(swspec)), false),
newSingleLineTagParser("Schemes", newSetSchemes(metaSchemeSetter(swspec))),
newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, metaSecuritySetter(swspec)), false),
newMultiLineTagParser("SecurityDefinitions", newYamlParser(rxSecurity, metaSecurityDefinitionsSetter(swspec)), true),
newSingleLineTagParser("Version", &setMetaSingle{swspec, rxVersion, setInfoVersion}),
newSingleLineTagParser("Host", &setMetaSingle{swspec, rxHost, setSwaggerHost}),
newSingleLineTagParser("BasePath", &setMetaSingle{swspec, rxBasePath, setSwaggerBasePath}),
newSingleLineTagParser("Contact", &setMetaSingle{swspec, rxContact, setInfoContact}),
newSingleLineTagParser("License", &setMetaSingle{swspec, rxLicense, setInfoLicense}),
newMultiLineTagParser("YAMLInfoExtensionsBlock", newYamlParser(rxInfoExtensions, infoVendorExtensibleSetter(swspec)), true),
newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, metaVendorExtensibleSetter(swspec)), true),
}
return sp
}
type setMetaSingle struct {
spec *spec.Swagger
rx *regexp.Regexp
set func(spec *spec.Swagger, lines []string) error
}
func (s *setMetaSingle) Matches(line string) bool {
return s.rx.MatchString(line)
}
func (s *setMetaSingle) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := s.rx.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
return s.set(s.spec, []string{matches[1]})
}
return nil
}
func setSwaggerHost(swspec *spec.Swagger, lines []string) error {
lns := lines
if len(lns) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
lns = []string{"localhost"}
}
swspec.Host = lns[0]
return nil
}
func setSwaggerBasePath(swspec *spec.Swagger, lines []string) error {
var ln string
if len(lines) > 0 {
ln = lines[0]
}
swspec.BasePath = ln
return nil
}
func setInfoVersion(swspec *spec.Swagger, lines []string) error {
if len(lines) == 0 {
return nil
}
info := safeInfo(swspec)
info.Version = strings.TrimSpace(lines[0])
return nil
}
func setInfoContact(swspec *spec.Swagger, lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
contact, err := parseContactInfo(lines[0])
if err != nil {
return err
}
info := safeInfo(swspec)
info.Contact = contact
return nil
}
func parseContactInfo(line string) (*spec.ContactInfo, error) {
nameEmail, url := splitURL(line)
var name, email string
if len(nameEmail) > 0 {
addr, err := mail.ParseAddress(nameEmail)
if err != nil {
return nil, err
}
name, email = addr.Name, addr.Address
}
return &spec.ContactInfo{
URL: url,
Name: name,
Email: email,
}, nil
}
func setInfoLicense(swspec *spec.Swagger, lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
info := safeInfo(swspec)
line := lines[0]
name, url := splitURL(line)
info.License = &spec.License{
Name: name,
URL: url,
}
return nil
}
func safeInfo(swspec *spec.Swagger) *spec.Info {
if swspec.Info == nil {
swspec.Info = new(spec.Info)
}
return swspec.Info
}
// httpFTPScheme matches http://, https://, ftp://, ftps://, ws:// and wss:// prefixes
var httpFTPScheme = regexp.MustCompile("(?:(?:ht|f)tp|ws)s?://")
func splitURL(line string) (notURL, url string) {
str := strings.TrimSpace(line)
parts := httpFTPScheme.FindStringIndex(str)
if len(parts) == 0 {
if len(str) > 0 {
notURL = str
}
return
}
if len(parts) > 0 {
notURL = strings.TrimSpace(str[:parts[0]])
url = strings.TrimSpace(str[parts[0]:])
}
return
}
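
A rough sketch (all values invented) of the package comment that the taggers registered in newMetaParser would consume; note that setInfoLicense and setInfoContact run splitURL on their line, so the free-form text before the first URL becomes the name and the remainder becomes the URL:

// Package classification Pet Store API.
//
// Schemes: https
// Host: petstore.example.com
// BasePath: /v2
// Version: 1.0.0
// License: MIT http://opensource.org/licenses/MIT
// Contact: Jane Doe <jane@example.com> https://example.com/support
//
// Consumes:
// - application/json
//
// Produces:
// - application/json
//
// swagger:meta
package classification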

View file

@ -0,0 +1,84 @@
// +build !go1.11
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scan
import (
"fmt"
"go/ast"
"github.com/go-openapi/spec"
"golang.org/x/tools/go/loader"
)
func newOperationsParser(prog *loader.Program) *operationsParser {
return &operationsParser{
program: prog,
}
}
type operationsParser struct {
program *loader.Program
definitions map[string]spec.Schema
operations map[string]*spec.Operation
responses map[string]spec.Response
}
func (op *operationsParser) Parse(gofile *ast.File, target interface{}, includeTags map[string]bool, excludeTags map[string]bool) error {
tgt := target.(*spec.Paths)
for _, comsec := range gofile.Comments {
content := parsePathAnnotation(rxOperation, comsec.List)
if content.Method == "" {
continue // it's not, next!
}
if !shouldAcceptTag(content.Tags, includeTags, excludeTags) {
if Debug {
fmt.Printf("operation %s %s is ignored due to tag rules\n", content.Method, content.Path)
}
continue
}
pthObj := tgt.Paths[content.Path]
op := setPathOperation(
content.Method, content.ID,
&pthObj, op.operations[content.ID])
op.Tags = content.Tags
sp := new(yamlSpecScanner)
sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
if err := sp.Parse(content.Remaining); err != nil {
return fmt.Errorf("operation (%s): %v", op.ID, err)
}
if err := sp.UnmarshalSpec(op.UnmarshalJSON); err != nil {
return fmt.Errorf("operation (%s): %v", op.ID, err)
}
if tgt.Paths == nil {
tgt.Paths = make(map[string]spec.PathItem)
}
tgt.Paths[content.Path] = pthObj
}
return nil
}
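
As a hedged illustration of the comments this parser walks (route and fields invented): a swagger:operation annotation carries a free-form summary and then, after a --- line, a YAML fragment that the yamlSpecScanner hands to op.UnmarshalJSON:

// swagger:operation GET /pets/{id} pets getPetByID
// Gets a pet by its identifier.
// ---
// parameters:
// - name: id
//   in: path
//   required: true
//   type: integer
// responses:
//   "200":
//     description: the requested pet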

View file

@ -0,0 +1,506 @@
// +build !go1.11
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scan
import (
"fmt"
"go/ast"
"strings"
"github.com/go-openapi/spec"
"golang.org/x/tools/go/loader"
)
type operationValidationBuilder interface {
validationBuilder
SetCollectionFormat(string)
}
type paramTypable struct {
param *spec.Parameter
}
func (pt paramTypable) Level() int { return 0 }
func (pt paramTypable) Typed(tpe, format string) {
pt.param.Typed(tpe, format)
}
func (pt paramTypable) SetRef(ref spec.Ref) {
pt.param.Ref = ref
}
func (pt paramTypable) Items() swaggerTypable {
bdt, schema := bodyTypable(pt.param.In, pt.param.Schema)
if bdt != nil {
pt.param.Schema = schema
return bdt
}
if pt.param.Items == nil {
pt.param.Items = new(spec.Items)
}
pt.param.Type = "array"
return itemsTypable{pt.param.Items, 1}
}
func (pt paramTypable) Schema() *spec.Schema {
if pt.param.In != "body" {
return nil
}
if pt.param.Schema == nil {
pt.param.Schema = new(spec.Schema)
}
return pt.param.Schema
}
type itemsTypable struct {
items *spec.Items
level int
}
func (pt itemsTypable) Level() int { return pt.level }
func (pt itemsTypable) Typed(tpe, format string) {
pt.items.Typed(tpe, format)
}
func (pt itemsTypable) SetRef(ref spec.Ref) {
pt.items.Ref = ref
}
func (pt itemsTypable) Schema() *spec.Schema {
return nil
}
func (pt itemsTypable) Items() swaggerTypable {
if pt.items.Items == nil {
pt.items.Items = new(spec.Items)
}
pt.items.Type = "array"
return itemsTypable{pt.items.Items, pt.level + 1}
}
type paramValidations struct {
current *spec.Parameter
}
func (sv paramValidations) SetMaximum(val float64, exclusive bool) {
sv.current.Maximum = &val
sv.current.ExclusiveMaximum = exclusive
}
func (sv paramValidations) SetMinimum(val float64, exclusive bool) {
sv.current.Minimum = &val
sv.current.ExclusiveMinimum = exclusive
}
func (sv paramValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
func (sv paramValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
func (sv paramValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
func (sv paramValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
func (sv paramValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
func (sv paramValidations) SetPattern(val string) { sv.current.Pattern = val }
func (sv paramValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
func (sv paramValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
func (sv paramValidations) SetEnum(val string) {
sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
}
func (sv paramValidations) SetDefault(val interface{}) { sv.current.Default = val }
func (sv paramValidations) SetExample(val interface{}) { sv.current.Example = val }
type itemsValidations struct {
current *spec.Items
}
func (sv itemsValidations) SetMaximum(val float64, exclusive bool) {
sv.current.Maximum = &val
sv.current.ExclusiveMaximum = exclusive
}
func (sv itemsValidations) SetMinimum(val float64, exclusive bool) {
sv.current.Minimum = &val
sv.current.ExclusiveMinimum = exclusive
}
func (sv itemsValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
func (sv itemsValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
func (sv itemsValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
func (sv itemsValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
func (sv itemsValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
func (sv itemsValidations) SetPattern(val string) { sv.current.Pattern = val }
func (sv itemsValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
func (sv itemsValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
func (sv itemsValidations) SetEnum(val string) {
sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
}
func (sv itemsValidations) SetDefault(val interface{}) { sv.current.Default = val }
func (sv itemsValidations) SetExample(val interface{}) { sv.current.Example = val }
type paramDecl struct {
File *ast.File
Decl *ast.GenDecl
TypeSpec *ast.TypeSpec
OperationIDs []string
}
func (sd *paramDecl) inferOperationIDs() (opids []string) {
if len(sd.OperationIDs) > 0 {
opids = sd.OperationIDs
return
}
if sd.Decl.Doc != nil {
for _, cmt := range sd.Decl.Doc.List {
for _, ln := range strings.Split(cmt.Text, "\n") {
matches := rxParametersOverride.FindStringSubmatch(ln)
if len(matches) > 1 && len(matches[1]) > 0 {
for _, pt := range strings.Split(matches[1], " ") {
tr := strings.TrimSpace(pt)
if len(tr) > 0 {
opids = append(opids, tr)
}
}
}
}
}
}
sd.OperationIDs = append(sd.OperationIDs, opids...)
return
}
func newParameterParser(prog *loader.Program) *paramStructParser {
scp := new(paramStructParser)
scp.program = prog
scp.scp = newSchemaParser(prog)
return scp
}
type paramStructParser struct {
program *loader.Program
postDecls []schemaDecl
scp *schemaParser
}
// Parse will traverse a file and look for parameters.
func (pp *paramStructParser) Parse(gofile *ast.File, target interface{}) error {
tgt := target.(map[string]*spec.Operation)
for _, decl := range gofile.Decls {
switch x1 := decl.(type) {
// Check for parameters at the package level.
case *ast.GenDecl:
for _, spc := range x1.Specs {
switch x2 := spc.(type) {
case *ast.TypeSpec:
sd := paramDecl{gofile, x1, x2, nil}
sd.inferOperationIDs()
if err := pp.parseDecl(tgt, sd); err != nil {
return err
}
}
}
// Check for parameters inside functions.
case *ast.FuncDecl:
for _, b := range x1.Body.List {
switch x2 := b.(type) {
case *ast.DeclStmt:
switch x3 := x2.Decl.(type) {
case *ast.GenDecl:
for _, spc := range x3.Specs {
switch x4 := spc.(type) {
case *ast.TypeSpec:
sd := paramDecl{gofile, x3, x4, nil}
sd.inferOperationIDs()
if err := pp.parseDecl(tgt, sd); err != nil {
return err
}
}
}
}
}
}
}
}
return nil
}
func (pp *paramStructParser) parseDecl(operations map[string]*spec.Operation, decl paramDecl) error {
	// Check if there is a swagger:parameters tag that is followed by one or more words;
	// these words are the ids of the operations this parameter struct applies to.
	// Once the type name is found, convert it to a schema by looking it up in the
	// parameters dictionary that got passed into this parse method.
for _, opid := range decl.inferOperationIDs() {
operation, ok := operations[opid]
if !ok {
operation = new(spec.Operation)
operations[opid] = operation
operation.ID = opid
}
// analyze struct body for fields etc
// each exported struct field:
// * gets a type mapped to a go primitive
// * perhaps gets a format
// * has to document the validations that apply for the type and the field
// * when the struct field points to a model it becomes a ref: #/definitions/ModelName
		// * comments that aren't tags are used as the description
if tpe, ok := decl.TypeSpec.Type.(*ast.StructType); ok {
if err := pp.parseStructType(decl.File, operation, tpe, make(map[string]spec.Parameter)); err != nil {
return err
}
}
//operations[opid] = operation
}
return nil
}
func (pp *paramStructParser) parseEmbeddedStruct(gofile *ast.File, operation *spec.Operation, expr ast.Expr, seenPreviously map[string]spec.Parameter) error {
switch tpe := expr.(type) {
case *ast.Ident:
// do lookup of type
// take primitives into account, they should result in an error for swagger
pkg, err := pp.scp.packageForFile(gofile, tpe)
if err != nil {
return fmt.Errorf("embedded struct: %v", err)
}
file, _, ts, err := findSourceFile(pkg, tpe.Name)
if err != nil {
return fmt.Errorf("embedded struct: %v", err)
}
if st, ok := ts.Type.(*ast.StructType); ok {
return pp.parseStructType(file, operation, st, seenPreviously)
}
case *ast.SelectorExpr:
// look up package, file and then type
pkg, err := pp.scp.packageForSelector(gofile, tpe.X)
if err != nil {
return fmt.Errorf("embedded struct: %v", err)
}
file, _, ts, err := findSourceFile(pkg, tpe.Sel.Name)
if err != nil {
return fmt.Errorf("embedded struct: %v", err)
}
if st, ok := ts.Type.(*ast.StructType); ok {
return pp.parseStructType(file, operation, st, seenPreviously)
}
case *ast.StarExpr:
return pp.parseEmbeddedStruct(gofile, operation, tpe.X, seenPreviously)
}
fmt.Printf("3%#v\n", expr)
return fmt.Errorf("unable to resolve embedded struct for: %v", expr)
}
func (pp *paramStructParser) parseStructType(gofile *ast.File, operation *spec.Operation, tpe *ast.StructType, seenPreviously map[string]spec.Parameter) error {
if tpe.Fields != nil {
pt := seenPreviously
for _, fld := range tpe.Fields.List {
if len(fld.Names) == 0 {
// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
// otherwise the fields will just be included as normal properties
if err := pp.parseEmbeddedStruct(gofile, operation, fld.Type, pt); err != nil {
return err
}
}
}
		// a slice used to keep track of the order of the map keys, as maps do not keep any specific order (since Go 1.4)
sequence := []string{}
for _, fld := range tpe.Fields.List {
if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() {
gnm := fld.Names[0].Name
nm, ignore, _, err := parseJSONTag(fld)
if err != nil {
return err
}
if ignore {
continue
}
in := "query"
// scan for param location first, this changes some behavior down the line
if fld.Doc != nil {
for _, cmt := range fld.Doc.List {
for _, line := range strings.Split(cmt.Text, "\n") {
matches := rxIn.FindStringSubmatch(line)
if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
in = strings.TrimSpace(matches[1])
}
}
}
}
ps := pt[nm]
ps.In = in
var pty swaggerTypable = paramTypable{&ps}
if in == "body" {
pty = schemaTypable{pty.Schema(), 0}
}
if in == "formData" && fld.Doc != nil && fileParam(fld.Doc) {
pty.Typed("file", "")
} else {
if err := pp.scp.parseNamedType(gofile, fld.Type, pty); err != nil {
return err
}
}
if strfmtName, ok := strfmtName(fld.Doc); ok {
ps.Typed("string", strfmtName)
ps.Ref = spec.Ref{}
}
sp := new(sectionedParser)
sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
if ps.Ref.String() == "" {
sp.taggers = []tagParser{
newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
newSingleLineTagParser("maximum", &setMaximum{paramValidations{&ps}, rxf(rxMaximumFmt, "")}),
newSingleLineTagParser("minimum", &setMinimum{paramValidations{&ps}, rxf(rxMinimumFmt, "")}),
newSingleLineTagParser("multipleOf", &setMultipleOf{paramValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
newSingleLineTagParser("minLength", &setMinLength{paramValidations{&ps}, rxf(rxMinLengthFmt, "")}),
newSingleLineTagParser("maxLength", &setMaxLength{paramValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
newSingleLineTagParser("pattern", &setPattern{paramValidations{&ps}, rxf(rxPatternFmt, "")}),
newSingleLineTagParser("collectionFormat", &setCollectionFormat{paramValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
newSingleLineTagParser("minItems", &setMinItems{paramValidations{&ps}, rxf(rxMinItemsFmt, "")}),
newSingleLineTagParser("maxItems", &setMaxItems{paramValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
newSingleLineTagParser("unique", &setUnique{paramValidations{&ps}, rxf(rxUniqueFmt, "")}),
newSingleLineTagParser("enum", &setEnum{paramValidations{&ps}, rxf(rxEnumFmt, "")}),
newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxDefaultFmt, "")}),
newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxExampleFmt, "")}),
newSingleLineTagParser("required", &setRequiredParam{&ps}),
}
itemsTaggers := func(items *spec.Items, level int) []tagParser {
					// the items expression is 1-indexed, not 0-indexed
itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
return []tagParser{
newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
}
}
var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
if items == nil {
return []tagParser{}, nil
}
switch iftpe := expr.(type) {
case *ast.ArrayType:
eleTaggers := itemsTaggers(items, level)
sp.taggers = append(eleTaggers, sp.taggers...)
otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
if err != nil {
return nil, err
}
return otherTaggers, nil
case *ast.SelectorExpr:
otherTaggers, err := parseArrayTypes(iftpe.Sel, items.Items, level+1)
if err != nil {
return nil, err
}
return otherTaggers, nil
case *ast.Ident:
taggers := []tagParser{}
if iftpe.Obj == nil {
taggers = itemsTaggers(items, level)
}
otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
if err != nil {
return nil, err
}
return append(taggers, otherTaggers...), nil
case *ast.StarExpr:
otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
if err != nil {
return nil, err
}
return otherTaggers, nil
default:
return nil, fmt.Errorf("unknown field type ele for %q", nm)
}
}
// check if this is a primitive, if so parse the validations from the
// doc comments of the slice declaration.
if ftped, ok := fld.Type.(*ast.ArrayType); ok {
taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
if err != nil {
return err
}
sp.taggers = append(taggers, sp.taggers...)
}
} else {
sp.taggers = []tagParser{
newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
newSingleLineTagParser("required", &matchOnlyParam{&ps, rxRequired}),
}
}
if err := sp.Parse(fld.Doc); err != nil {
return err
}
if ps.In == "path" {
ps.Required = true
}
if ps.Name == "" {
ps.Name = nm
}
if nm != gnm {
addExtension(&ps.VendorExtensible, "x-go-name", gnm)
}
pt[nm] = ps
sequence = append(sequence, nm)
}
}
for _, k := range sequence {
p := pt[k]
for i, v := range operation.Parameters {
if v.Name == k {
operation.Parameters = append(operation.Parameters[:i], operation.Parameters[i+1:]...)
break
}
}
operation.Parameters = append(operation.Parameters, p)
}
}
return nil
}
func isAliasParam(prop swaggerTypable) bool {
var isParam bool
if param, ok := prop.(paramTypable); ok {
isParam = param.param.In == "query" ||
param.param.In == "path" ||
param.param.In == "formData"
}
return isParam
}
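
A hypothetical swagger:parameters struct of the shape this parser walks (names and limits invented): the in: line picks the parameter location, path parameters are forced to required, and the validation tags registered above (maximum, minLength, pattern and so on) are read from the field comments:

// GetPetParams collects the parameters for the getPetByID operation.
// swagger:parameters getPetByID
type GetPetParams struct {
	// The identifier of the pet.
	//
	// in: path
	ID int64 `json:"id"`

	// Maximum number of related records to include.
	//
	// in: query
	// maximum: 100
	// default: 10
	Limit int64 `json:"limit"`
}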

150
vendor/github.com/go-swagger/go-swagger/scan/path.go generated vendored Normal file
View file

@ -0,0 +1,150 @@
// +build !go1.11
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scan
import (
"go/ast"
"regexp"
"strings"
"github.com/go-openapi/spec"
)
type parsedPathContent struct {
Method, Path, ID string
Tags []string
Remaining *ast.CommentGroup
}
func parsePathAnnotation(annotation *regexp.Regexp, lines []*ast.Comment) (cnt parsedPathContent) {
var justMatched bool
for _, cmt := range lines {
for _, line := range strings.Split(cmt.Text, "\n") {
matches := annotation.FindStringSubmatch(line)
if len(matches) > 3 {
cnt.Method, cnt.Path, cnt.ID = matches[1], matches[2], matches[len(matches)-1]
cnt.Tags = rxSpace.Split(matches[3], -1)
if len(matches[3]) == 0 {
cnt.Tags = nil
}
justMatched = true
} else if cnt.Method != "" {
if cnt.Remaining == nil {
cnt.Remaining = new(ast.CommentGroup)
}
if !justMatched || strings.TrimSpace(rxStripComments.ReplaceAllString(line, "")) != "" {
cc := new(ast.Comment)
cc.Slash = cmt.Slash
cc.Text = line
cnt.Remaining.List = append(cnt.Remaining.List, cc)
justMatched = false
}
}
}
}
return
}
func setPathOperation(method, id string, pthObj *spec.PathItem, op *spec.Operation) *spec.Operation {
if op == nil {
op = new(spec.Operation)
op.ID = id
}
switch strings.ToUpper(method) {
case "GET":
if pthObj.Get != nil {
if id == pthObj.Get.ID {
op = pthObj.Get
} else {
pthObj.Get = op
}
} else {
pthObj.Get = op
}
case "POST":
if pthObj.Post != nil {
if id == pthObj.Post.ID {
op = pthObj.Post
} else {
pthObj.Post = op
}
} else {
pthObj.Post = op
}
case "PUT":
if pthObj.Put != nil {
if id == pthObj.Put.ID {
op = pthObj.Put
} else {
pthObj.Put = op
}
} else {
pthObj.Put = op
}
case "PATCH":
if pthObj.Patch != nil {
if id == pthObj.Patch.ID {
op = pthObj.Patch
} else {
pthObj.Patch = op
}
} else {
pthObj.Patch = op
}
case "HEAD":
if pthObj.Head != nil {
if id == pthObj.Head.ID {
op = pthObj.Head
} else {
pthObj.Head = op
}
} else {
pthObj.Head = op
}
case "DELETE":
if pthObj.Delete != nil {
if id == pthObj.Delete.ID {
op = pthObj.Delete
} else {
pthObj.Delete = op
}
} else {
pthObj.Delete = op
}
case "OPTIONS":
if pthObj.Options != nil {
if id == pthObj.Options.ID {
op = pthObj.Options
} else {
pthObj.Options = op
}
} else {
pthObj.Options = op
}
}
return op
}
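
As a rough sketch (route invented), applying parsePathAnnotation with rxRoute to a comment group containing

// swagger:route GET /pets/{id} pets getPetByID
// Gets a pet by its identifier.

would yield roughly Method "GET", Path "/pets/{id}", Tags []string{"pets"} and ID "getPetByID", with the description line collected into Remaining; setPathOperation then attaches the resulting operation to the GET slot of the path item.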

View file

@ -0,0 +1,447 @@
// +build !go1.11
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scan
import (
"fmt"
"go/ast"
"strings"
"golang.org/x/tools/go/loader"
"github.com/go-openapi/spec"
)
type responseTypable struct {
in string
header *spec.Header
response *spec.Response
}
func (ht responseTypable) Level() int { return 0 }
func (ht responseTypable) Typed(tpe, format string) {
ht.header.Typed(tpe, format)
}
func bodyTypable(in string, schema *spec.Schema) (swaggerTypable, *spec.Schema) {
if in == "body" {
// get the schema for items on the schema property
if schema == nil {
schema = new(spec.Schema)
}
if schema.Items == nil {
schema.Items = new(spec.SchemaOrArray)
}
if schema.Items.Schema == nil {
schema.Items.Schema = new(spec.Schema)
}
schema.Typed("array", "")
return schemaTypable{schema.Items.Schema, 0}, schema
}
return nil, nil
}
func (ht responseTypable) Items() swaggerTypable {
bdt, schema := bodyTypable(ht.in, ht.response.Schema)
if bdt != nil {
ht.response.Schema = schema
return bdt
}
if ht.header.Items == nil {
ht.header.Items = new(spec.Items)
}
ht.header.Type = "array"
return itemsTypable{ht.header.Items, 1}
}
func (ht responseTypable) SetRef(ref spec.Ref) {
// having trouble seeing the usefulness of this one here
ht.Schema().Ref = ref
}
func (ht responseTypable) Schema() *spec.Schema {
if ht.response.Schema == nil {
ht.response.Schema = new(spec.Schema)
}
return ht.response.Schema
}
func (ht responseTypable) SetSchema(schema *spec.Schema) {
ht.response.Schema = schema
}
func (ht responseTypable) CollectionOf(items *spec.Items, format string) {
ht.header.CollectionOf(items, format)
}
type headerValidations struct {
current *spec.Header
}
func (sv headerValidations) SetMaximum(val float64, exclusive bool) {
sv.current.Maximum = &val
sv.current.ExclusiveMaximum = exclusive
}
func (sv headerValidations) SetMinimum(val float64, exclusive bool) {
sv.current.Minimum = &val
sv.current.ExclusiveMinimum = exclusive
}
func (sv headerValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
func (sv headerValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
func (sv headerValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
func (sv headerValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
func (sv headerValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
func (sv headerValidations) SetPattern(val string) { sv.current.Pattern = val }
func (sv headerValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
func (sv headerValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
func (sv headerValidations) SetEnum(val string) {
sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
}
func (sv headerValidations) SetDefault(val interface{}) { sv.current.Default = val }
func (sv headerValidations) SetExample(val interface{}) { sv.current.Example = val }
func newResponseDecl(file *ast.File, decl *ast.GenDecl, ts *ast.TypeSpec) responseDecl {
var rd responseDecl
rd.File = file
rd.Decl = decl
rd.TypeSpec = ts
rd.inferNames()
return rd
}
type responseDecl struct {
File *ast.File
Decl *ast.GenDecl
TypeSpec *ast.TypeSpec
GoName string
Name string
annotated bool
}
func (sd *responseDecl) hasAnnotation() bool {
sd.inferNames()
return sd.annotated
}
func (sd *responseDecl) inferNames() (goName string, name string) {
if sd.GoName != "" {
goName, name = sd.GoName, sd.Name
return
}
goName = sd.TypeSpec.Name.Name
name = goName
if sd.Decl.Doc != nil {
DECLS:
for _, cmt := range sd.Decl.Doc.List {
for _, ln := range strings.Split(cmt.Text, "\n") {
matches := rxResponseOverride.FindStringSubmatch(ln)
if len(matches) > 0 {
sd.annotated = true
}
if len(matches) > 1 && len(matches[1]) > 0 {
name = matches[1]
break DECLS
}
}
}
}
sd.GoName = goName
sd.Name = name
return
}
func newResponseParser(prog *loader.Program) *responseParser {
return &responseParser{prog, nil, newSchemaParser(prog)}
}
type responseParser struct {
program *loader.Program
postDecls []schemaDecl
scp *schemaParser
}
func (rp *responseParser) Parse(gofile *ast.File, target interface{}) error {
tgt := target.(map[string]spec.Response)
for _, decl := range gofile.Decls {
switch x1 := decl.(type) {
// Check for parameters at the package level.
case *ast.GenDecl:
for _, spc := range x1.Specs {
switch x2 := spc.(type) {
case *ast.TypeSpec:
sd := newResponseDecl(gofile, x1, x2)
if sd.hasAnnotation() {
if err := rp.parseDecl(tgt, sd); err != nil {
return err
}
}
}
}
// Check for parameters inside functions.
case *ast.FuncDecl:
for _, b := range x1.Body.List {
switch x2 := b.(type) {
case *ast.DeclStmt:
switch x3 := x2.Decl.(type) {
case *ast.GenDecl:
for _, spc := range x3.Specs {
switch x4 := spc.(type) {
case *ast.TypeSpec:
sd := newResponseDecl(gofile, x3, x4)
if sd.hasAnnotation() {
if err := rp.parseDecl(tgt, sd); err != nil {
return err
}
}
}
}
}
}
}
}
}
return nil
}
func (rp *responseParser) parseDecl(responses map[string]spec.Response, decl responseDecl) error {
	// Check if there is a swagger:response tag, optionally followed by a response name;
	// that name identifies the response this struct describes.
	// Once the type name is found, convert the struct into a response definition by
	// filling in the headers and the schema from its fields.
response := responses[decl.Name]
resPtr := &response
// analyze doc comment for the model
sp := new(sectionedParser)
sp.setDescription = func(lines []string) { resPtr.Description = joinDropLast(lines) }
if err := sp.Parse(decl.Decl.Doc); err != nil {
return err
}
// analyze struct body for fields etc
// each exported struct field:
// * gets a type mapped to a go primitive
// * perhaps gets a format
// * has to document the validations that apply for the type and the field
// * when the struct field points to a model it becomes a ref: #/definitions/ModelName
	// * comments that aren't tags are used as the description
if tpe, ok := decl.TypeSpec.Type.(*ast.StructType); ok {
if err := rp.parseStructType(decl.File, resPtr, tpe, make(map[string]struct{})); err != nil {
return err
}
}
responses[decl.Name] = response
return nil
}
func (rp *responseParser) parseEmbeddedStruct(gofile *ast.File, response *spec.Response, expr ast.Expr, seenPreviously map[string]struct{}) error {
switch tpe := expr.(type) {
case *ast.Ident:
// do lookup of type
// take primitives into account, they should result in an error for swagger
pkg, err := rp.scp.packageForFile(gofile, tpe)
if err != nil {
return fmt.Errorf("embedded struct: %v", err)
}
file, _, ts, err := findSourceFile(pkg, tpe.Name)
if err != nil {
return fmt.Errorf("embedded struct: %v", err)
}
if st, ok := ts.Type.(*ast.StructType); ok {
return rp.parseStructType(file, response, st, seenPreviously)
}
case *ast.SelectorExpr:
// look up package, file and then type
pkg, err := rp.scp.packageForSelector(gofile, tpe.X)
if err != nil {
return fmt.Errorf("embedded struct: %v", err)
}
file, _, ts, err := findSourceFile(pkg, tpe.Sel.Name)
if err != nil {
return fmt.Errorf("embedded struct: %v", err)
}
if st, ok := ts.Type.(*ast.StructType); ok {
return rp.parseStructType(file, response, st, seenPreviously)
}
case *ast.StarExpr:
return rp.parseEmbeddedStruct(gofile, response, tpe.X, seenPreviously)
}
fmt.Printf("1%#v\n", expr)
return fmt.Errorf("unable to resolve embedded struct for: %v", expr)
}
func (rp *responseParser) parseStructType(gofile *ast.File, response *spec.Response, tpe *ast.StructType, seenPreviously map[string]struct{}) error {
if tpe.Fields != nil {
seenProperties := seenPreviously
for _, fld := range tpe.Fields.List {
if len(fld.Names) == 0 {
// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
// otherwise the fields will just be included as normal properties
if err := rp.parseEmbeddedStruct(gofile, response, fld.Type, seenProperties); err != nil {
return err
}
}
}
for _, fld := range tpe.Fields.List {
if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() {
nm, ignore, _, err := parseJSONTag(fld)
if err != nil {
return err
}
if ignore {
continue
}
var in string
// scan for param location first, this changes some behavior down the line
if fld.Doc != nil {
for _, cmt := range fld.Doc.List {
for _, line := range strings.Split(cmt.Text, "\n") {
matches := rxIn.FindStringSubmatch(line)
if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
in = strings.TrimSpace(matches[1])
}
}
}
}
ps := response.Headers[nm]
// support swagger:file for response
// An API operation can return a file, such as an image or PDF. In this case,
// define the response schema with type: file and specify the appropriate MIME types in the produces section.
if fld.Doc != nil && fileParam(fld.Doc) {
response.Schema = &spec.Schema{}
response.Schema.Typed("file", "")
} else if err := rp.scp.parseNamedType(gofile, fld.Type, responseTypable{in, &ps, response}); err != nil {
return err
}
if strfmtName, ok := strfmtName(fld.Doc); ok {
ps.Typed("string", strfmtName)
}
sp := new(sectionedParser)
sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
sp.taggers = []tagParser{
newSingleLineTagParser("maximum", &setMaximum{headerValidations{&ps}, rxf(rxMaximumFmt, "")}),
newSingleLineTagParser("minimum", &setMinimum{headerValidations{&ps}, rxf(rxMinimumFmt, "")}),
newSingleLineTagParser("multipleOf", &setMultipleOf{headerValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
newSingleLineTagParser("minLength", &setMinLength{headerValidations{&ps}, rxf(rxMinLengthFmt, "")}),
newSingleLineTagParser("maxLength", &setMaxLength{headerValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
newSingleLineTagParser("pattern", &setPattern{headerValidations{&ps}, rxf(rxPatternFmt, "")}),
newSingleLineTagParser("collectionFormat", &setCollectionFormat{headerValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
newSingleLineTagParser("minItems", &setMinItems{headerValidations{&ps}, rxf(rxMinItemsFmt, "")}),
newSingleLineTagParser("maxItems", &setMaxItems{headerValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
newSingleLineTagParser("unique", &setUnique{headerValidations{&ps}, rxf(rxUniqueFmt, "")}),
newSingleLineTagParser("enum", &setEnum{headerValidations{&ps}, rxf(rxEnumFmt, "")}),
newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxDefaultFmt, "")}),
newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxExampleFmt, "")}),
}
itemsTaggers := func(items *spec.Items, level int) []tagParser {
				// the items expression is 1-indexed, not 0-indexed
itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
return []tagParser{
newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
}
}
var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
if items == nil {
return []tagParser{}, nil
}
switch iftpe := expr.(type) {
case *ast.ArrayType:
eleTaggers := itemsTaggers(items, level)
sp.taggers = append(eleTaggers, sp.taggers...)
otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
if err != nil {
return nil, err
}
return otherTaggers, nil
case *ast.Ident:
taggers := []tagParser{}
if iftpe.Obj == nil {
taggers = itemsTaggers(items, level)
}
otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
if err != nil {
return nil, err
}
return append(taggers, otherTaggers...), nil
case *ast.StarExpr:
otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
if err != nil {
return nil, err
}
return otherTaggers, nil
default:
return nil, fmt.Errorf("unknown field type ele for %q", nm)
}
}
// check if this is a primitive, if so parse the validations from the
// doc comments of the slice declaration.
if ftped, ok := fld.Type.(*ast.ArrayType); ok {
taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
if err != nil {
return err
}
sp.taggers = append(taggers, sp.taggers...)
}
if err := sp.Parse(fld.Doc); err != nil {
return err
}
if in != "body" {
seenProperties[nm] = struct{}{}
if response.Headers == nil {
response.Headers = make(map[string]spec.Header)
}
response.Headers[nm] = ps
}
}
}
for k := range response.Headers {
if _, ok := seenProperties[k]; !ok {
delete(response.Headers, k)
}
}
}
return nil
}
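
A hypothetical swagger:response struct of the kind this parser fills in (names invented): fields without an in: body comment become response headers keyed by their JSON name, the in: body field provides the schema, and a swagger:file comment on a field switches the schema to type file:

// A PetResponse returns a single pet.
// swagger:response petResponse
type PetResponse struct {
	// Remaining requests allowed for the client.
	XRateLimit int64 `json:"X-Rate-Limit"`

	// in: body
	Body *Pet `json:"body"`
}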

View file

@ -0,0 +1,252 @@
// +build !go1.11
package scan
import (
"errors"
"strconv"
"strings"
"github.com/go-openapi/spec"
)
const (
// ParamDescriptionKey indicates the tag used to define a parameter description in swagger:route
ParamDescriptionKey = "description"
// ParamNameKey indicates the tag used to define a parameter name in swagger:route
ParamNameKey = "name"
// ParamInKey indicates the tag used to define a parameter location in swagger:route
ParamInKey = "in"
// ParamRequiredKey indicates the tag used to declare whether a parameter is required in swagger:route
ParamRequiredKey = "required"
// ParamTypeKey indicates the tag used to define the parameter type in swagger:route
ParamTypeKey = "type"
// ParamAllowEmptyKey indicates the tag used to indicate whether a parameter allows empty values in swagger:route
ParamAllowEmptyKey = "allowempty"
// SchemaMinKey indicates the tag used to indicate the minimum value allowed for this type in swagger:route
SchemaMinKey = "min"
// SchemaMaxKey indicates the tag used to indicate the maximum value allowed for this type in swagger:route
SchemaMaxKey = "max"
// SchemaEnumKey indicates the tag used to specify the allowed values for this type in swagger:route
SchemaEnumKey = "enum"
// SchemaFormatKey indicates the expected format for this field in swagger:route
SchemaFormatKey = "format"
// SchemaDefaultKey indicates the default value for this field in swagger:route
SchemaDefaultKey = "default"
// SchemaMinLenKey indicates the minimum length of this field in swagger:route
SchemaMinLenKey = "minlength"
// SchemaMaxLenKey indicates the maximum length of this field in swagger:route
SchemaMaxLenKey = "maxlength"
// TypeArray is the identifier for an array type in swagger:route
TypeArray = "array"
// TypeNumber is the identifier for a number type in swagger:route
TypeNumber = "number"
// TypeInteger is the identifier for an integer type in swagger:route
TypeInteger = "integer"
// TypeBoolean is the identifier for a boolean type in swagger:route
TypeBoolean = "boolean"
// TypeBool is the identifier for a boolean type in swagger:route
TypeBool = "bool"
// TypeObject is the identifier for an object type in swagger:route
TypeObject = "object"
// TypeString is the identifier for a string type in swagger:route
TypeString = "string"
)
var (
validIn = []string{"path", "query", "header", "body", "form"}
basicTypes = []string{TypeInteger, TypeNumber, TypeString, TypeBoolean, TypeBool, TypeArray}
)
func newSetParams(params []*spec.Parameter, setter func([]*spec.Parameter)) *setOpParams {
return &setOpParams{
set: setter,
parameters: params,
}
}
type setOpParams struct {
set func([]*spec.Parameter)
parameters []*spec.Parameter
}
func (s *setOpParams) Matches(line string) bool {
return rxParameters.MatchString(line)
}
func (s *setOpParams) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
var current *spec.Parameter
var extraData map[string]string
for _, line := range lines {
l := strings.TrimSpace(line)
if strings.HasPrefix(l, "+") {
s.finalizeParam(current, extraData)
current = new(spec.Parameter)
extraData = make(map[string]string)
l = strings.TrimPrefix(l, "+")
}
kv := strings.SplitN(l, ":", 2)
if len(kv) <= 1 {
continue
}
key := strings.ToLower(strings.TrimSpace(kv[0]))
value := strings.TrimSpace(kv[1])
if current == nil {
return errors.New("invalid route/operation schema provided")
}
switch key {
case ParamDescriptionKey:
current.Description = value
case ParamNameKey:
current.Name = value
case ParamInKey:
v := strings.ToLower(value)
if contains(validIn, v) {
current.In = v
}
case ParamRequiredKey:
if v, err := strconv.ParseBool(value); err == nil {
current.Required = v
}
case ParamTypeKey:
if current.Schema == nil {
current.Schema = new(spec.Schema)
}
if contains(basicTypes, value) {
current.Type = strings.ToLower(value)
if current.Type == TypeBool {
current.Type = TypeBoolean
}
} else {
if ref, err := spec.NewRef("#/definitions/" + value); err == nil {
current.Type = TypeObject
current.Schema.Ref = ref
}
}
current.Schema.Type = spec.StringOrArray{current.Type}
case ParamAllowEmptyKey:
if v, err := strconv.ParseBool(value); err == nil {
current.AllowEmptyValue = v
}
default:
extraData[key] = value
}
}
s.finalizeParam(current, extraData)
s.set(s.parameters)
return nil
}
func (s *setOpParams) finalizeParam(param *spec.Parameter, data map[string]string) {
if param == nil {
return
}
processSchema(data, param)
s.parameters = append(s.parameters, param)
}
func processSchema(data map[string]string, param *spec.Parameter) {
if param.Schema == nil {
return
}
var enumValues []string
for key, value := range data {
switch key {
case SchemaMinKey:
if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
v, _ := strconv.ParseFloat(value, 64)
param.Schema.Minimum = &v
}
case SchemaMaxKey:
if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
v, _ := strconv.ParseFloat(value, 64)
param.Schema.Maximum = &v
}
case SchemaMinLenKey:
if getType(param.Schema) == TypeArray {
v, _ := strconv.ParseInt(value, 10, 64)
param.Schema.MinLength = &v
}
case SchemaMaxLenKey:
if getType(param.Schema) == TypeArray {
v, _ := strconv.ParseInt(value, 10, 64)
param.Schema.MaxLength = &v
}
case SchemaEnumKey:
enumValues = strings.Split(value, ",")
case SchemaFormatKey:
param.Schema.Format = value
case SchemaDefaultKey:
param.Schema.Default = convert(param.Type, value)
}
}
if param.Description != "" {
param.Schema.Description = param.Description
}
convertEnum(param.Schema, enumValues)
}
func convertEnum(schema *spec.Schema, enumValues []string) {
if len(enumValues) == 0 {
return
}
var finalEnum []interface{}
for _, v := range enumValues {
finalEnum = append(finalEnum, convert(schema.Type[0], strings.TrimSpace(v)))
}
schema.Enum = finalEnum
}
func convert(typeStr, valueStr string) interface{} {
switch typeStr {
case TypeInteger:
fallthrough
case TypeNumber:
if num, err := strconv.ParseFloat(valueStr, 64); err == nil {
return num
}
case TypeBoolean:
fallthrough
case TypeBool:
if b, err := strconv.ParseBool(valueStr); err == nil {
return b
}
}
return valueStr
}
func getType(schema *spec.Schema) string {
if len(schema.Type) == 0 {
return ""
}
return schema.Type[0]
}
func contains(arr []string, obj string) bool {
for _, v := range arr {
if v == obj {
return true
}
}
return false
}
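
As an invented example of the block this parser handles: inside a swagger:route comment, a parameters: section lists entries that each start with +, followed by key: value lines using the keys defined above (name, in, type, required, description, plus schema keys such as min, max and enum); a type that is not one of the basic types becomes a #/definitions/ reference:

// swagger:route POST /pets pets createPet
// Creates a new pet.
//
// parameters:
//   + name: pet
//     in: body
//     description: the pet to add to the store
//     required: true
//     type: petModel
// responses:
//   200: petResponse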

138
vendor/github.com/go-swagger/go-swagger/scan/routes.go generated vendored Normal file
View file

@ -0,0 +1,138 @@
// +build !go1.11
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scan
import (
"fmt"
"go/ast"
"github.com/go-openapi/spec"
"golang.org/x/tools/go/loader"
)
func opConsumesSetter(op *spec.Operation) func([]string) {
return func(consumes []string) { op.Consumes = consumes }
}
func opProducesSetter(op *spec.Operation) func([]string) {
return func(produces []string) { op.Produces = produces }
}
func opSchemeSetter(op *spec.Operation) func([]string) {
return func(schemes []string) { op.Schemes = schemes }
}
func opSecurityDefsSetter(op *spec.Operation) func([]map[string][]string) {
return func(securityDefs []map[string][]string) { op.Security = securityDefs }
}
func opResponsesSetter(op *spec.Operation) func(*spec.Response, map[int]spec.Response) {
return func(def *spec.Response, scr map[int]spec.Response) {
if op.Responses == nil {
op.Responses = new(spec.Responses)
}
op.Responses.Default = def
op.Responses.StatusCodeResponses = scr
}
}
func opParamSetter(op *spec.Operation) func([]*spec.Parameter) {
return func(params []*spec.Parameter) {
for _, v := range params {
op.AddParam(v)
}
}
}
func newRoutesParser(prog *loader.Program) *routesParser {
return &routesParser{
program: prog,
}
}
type routesParser struct {
program *loader.Program
definitions map[string]spec.Schema
operations map[string]*spec.Operation
responses map[string]spec.Response
parameters []*spec.Parameter
}
func (rp *routesParser) Parse(gofile *ast.File, target interface{}, includeTags map[string]bool, excludeTags map[string]bool) error {
tgt := target.(*spec.Paths)
for _, comsec := range gofile.Comments {
content := parsePathAnnotation(rxRoute, comsec.List)
if content.Method == "" {
continue // it's not, next!
}
if !shouldAcceptTag(content.Tags, includeTags, excludeTags) {
if Debug {
fmt.Printf("route %s %s is ignored due to tag rules\n", content.Method, content.Path)
}
continue
}
pthObj := tgt.Paths[content.Path]
op := setPathOperation(
content.Method, content.ID,
&pthObj, rp.operations[content.ID])
op.Tags = content.Tags
sp := new(sectionedParser)
sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
sr := newSetResponses(rp.definitions, rp.responses, opResponsesSetter(op))
spa := newSetParams(rp.parameters, opParamSetter(op))
sp.taggers = []tagParser{
newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, opConsumesSetter(op)), false),
newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, opProducesSetter(op)), false),
newSingleLineTagParser("Schemes", newSetSchemes(opSchemeSetter(op))),
newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, opSecurityDefsSetter(op)), false),
newMultiLineTagParser("Parameters", spa, false),
newMultiLineTagParser("Responses", sr, false),
}
if err := sp.Parse(content.Remaining); err != nil {
return fmt.Errorf("operation (%s): %v", op.ID, err)
}
if tgt.Paths == nil {
tgt.Paths = make(map[string]spec.PathItem)
}
tgt.Paths[content.Path] = pthObj
}
return nil
}
func shouldAcceptTag(tags []string, includeTags map[string]bool, excludeTags map[string]bool) bool {
for _, tag := range tags {
if len(includeTags) > 0 {
if includeTags[tag] {
return true
}
} else if len(excludeTags) > 0 {
if excludeTags[tag] {
return false
}
}
}
return len(includeTags) <= 0
}
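
A small usage sketch of the tag-filter semantics implemented by shouldAcceptTag (tag names invented): with an include list only routes carrying one of those tags survive, an exclude list drops matching routes, and with neither everything is accepted:

include := map[string]bool{"pets": true}
exclude := map[string]bool{"internal": true}

shouldAcceptTag([]string{"pets"}, include, nil)     // true: tag is explicitly included
shouldAcceptTag([]string{"store"}, include, nil)    // false: include list set, no tag matches
shouldAcceptTag([]string{"internal"}, nil, exclude) // false: tag is explicitly excluded
shouldAcceptTag([]string{"store"}, nil, exclude)    // true: not excluded, no include list
shouldAcceptTag(nil, nil, nil)                      // true: no filters configured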

951
vendor/github.com/go-swagger/go-swagger/scan/scanner.go generated vendored Normal file
View file

@ -0,0 +1,951 @@
// +build !go1.11
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scan
import (
"encoding/json"
"errors"
"fmt"
"go/ast"
"go/build"
goparser "go/parser"
"go/types"
"log"
"os"
"regexp"
"strings"
"github.com/go-openapi/loads/fmts"
"github.com/go-openapi/spec"
"github.com/go-openapi/swag"
"golang.org/x/tools/go/loader"
yaml "gopkg.in/yaml.v2"
)
const (
rxMethod = "(\\p{L}+)"
rxPath = "((?:/[\\p{L}\\p{N}\\p{Pd}\\p{Pc}{}\\-\\.\\?_~%!$&'()*+,;=:@/]*)+/?)"
rxOpTags = "(\\p{L}[\\p{L}\\p{N}\\p{Pd}\\.\\p{Pc}\\p{Zs}]+)"
rxOpID = "((?:\\p{L}[\\p{L}\\p{N}\\p{Pd}\\p{Pc}]+)+)"
rxMaximumFmt = "%s[Mm]ax(?:imum)?\\p{Zs}*:\\p{Zs}*([\\<=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
rxMinimumFmt = "%s[Mm]in(?:imum)?\\p{Zs}*:\\p{Zs}*([\\>=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
rxMultipleOfFmt = "%s[Mm]ultiple\\p{Zs}*[Oo]f\\p{Zs}*:\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
rxMaxLengthFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
rxMinLengthFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
rxPatternFmt = "%s[Pp]attern\\p{Zs}*:\\p{Zs}*(.*)$"
rxCollectionFormatFmt = "%s[Cc]ollection(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ff]ormat)\\p{Zs}*:\\p{Zs}*(.*)$"
rxEnumFmt = "%s[Ee]num\\p{Zs}*:\\p{Zs}*(.*)$"
rxDefaultFmt = "%s[Dd]efault\\p{Zs}*:\\p{Zs}*(.*)$"
rxExampleFmt = "%s[Ee]xample\\p{Zs}*:\\p{Zs}*(.*)$"
rxMaxItemsFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
rxMinItemsFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
rxUniqueFmt = "%s[Uu]nique\\p{Zs}*:\\p{Zs}*(true|false)$"
rxItemsPrefixFmt = "(?:[Ii]tems[\\.\\p{Zs}]*){%d}"
)
var (
rxSwaggerAnnotation = regexp.MustCompile(`swagger:([\p{L}\p{N}\p{Pd}\p{Pc}]+)`)
rxFileUpload = regexp.MustCompile(`swagger:file`)
rxStrFmt = regexp.MustCompile(`swagger:strfmt\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
rxAlias = regexp.MustCompile(`swagger:alias`)
rxName = regexp.MustCompile(`swagger:name\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)$`)
rxAllOf = regexp.MustCompile(`swagger:allOf\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)?$`)
rxModelOverride = regexp.MustCompile(`swagger:model\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
rxResponseOverride = regexp.MustCompile(`swagger:response\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
rxParametersOverride = regexp.MustCompile(`swagger:parameters\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\p{Zs}]+)$`)
rxEnum = regexp.MustCompile(`swagger:enum\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
rxIgnoreOverride = regexp.MustCompile(`swagger:ignore\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
rxDefault = regexp.MustCompile(`swagger:default\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
rxType = regexp.MustCompile(`swagger:type\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
rxRoute = regexp.MustCompile(
"swagger:route\\p{Zs}*" +
rxMethod +
"\\p{Zs}*" +
rxPath +
"(?:\\p{Zs}+" +
rxOpTags +
")?\\p{Zs}+" +
rxOpID + "\\p{Zs}*$")
rxBeginYAMLSpec = regexp.MustCompile(`---\p{Zs}*$`)
rxUncommentHeaders = regexp.MustCompile(`^[\p{Zs}\t/\*-]*\|?`)
rxUncommentYAML = regexp.MustCompile(`^[\p{Zs}\t]*/*`)
rxOperation = regexp.MustCompile(
"swagger:operation\\p{Zs}*" +
rxMethod +
"\\p{Zs}*" +
rxPath +
"(?:\\p{Zs}+" +
rxOpTags +
")?\\p{Zs}+" +
rxOpID + "\\p{Zs}*$")
rxSpace = regexp.MustCompile(`\p{Zs}+`)
rxIndent = regexp.MustCompile(`\p{Zs}*/*\p{Zs}*[^\p{Zs}]`)
rxPunctuationEnd = regexp.MustCompile(`\p{Po}$`)
rxStripComments = regexp.MustCompile(`^[^\p{L}\p{N}\p{Pd}\p{Pc}\+]*`)
rxStripTitleComments = regexp.MustCompile(`^[^\p{L}]*[Pp]ackage\p{Zs}+[^\p{Zs}]+\p{Zs}*`)
rxAllowedExtensions = regexp.MustCompile(`^[Xx]-`)
rxIn = regexp.MustCompile(`[Ii]n\p{Zs}*:\p{Zs}*(query|path|header|body|formData)$`)
rxRequired = regexp.MustCompile(`[Rr]equired\p{Zs}*:\p{Zs}*(true|false)$`)
rxDiscriminator = regexp.MustCompile(`[Dd]iscriminator\p{Zs}*:\p{Zs}*(true|false)$`)
rxReadOnly = regexp.MustCompile(`[Rr]ead(?:\p{Zs}*|[\p{Pd}\p{Pc}])?[Oo]nly\p{Zs}*:\p{Zs}*(true|false)$`)
rxConsumes = regexp.MustCompile(`[Cc]onsumes\p{Zs}*:`)
rxProduces = regexp.MustCompile(`[Pp]roduces\p{Zs}*:`)
rxSecuritySchemes = regexp.MustCompile(`[Ss]ecurity\p{Zs}*:`)
rxSecurity = regexp.MustCompile(`[Ss]ecurity\p{Zs}*[Dd]efinitions:`)
rxResponses = regexp.MustCompile(`[Rr]esponses\p{Zs}*:`)
rxParameters = regexp.MustCompile(`[Pp]arameters\p{Zs}*:`)
rxSchemes = regexp.MustCompile(`[Ss]chemes\p{Zs}*:\p{Zs}*((?:(?:https?|HTTPS?|wss?|WSS?)[\p{Zs},]*)+)$`)
rxVersion = regexp.MustCompile(`[Vv]ersion\p{Zs}*:\p{Zs}*(.+)$`)
rxHost = regexp.MustCompile(`[Hh]ost\p{Zs}*:\p{Zs}*(.+)$`)
rxBasePath = regexp.MustCompile(`[Bb]ase\p{Zs}*-*[Pp]ath\p{Zs}*:\p{Zs}*` + rxPath + "$")
rxLicense = regexp.MustCompile(`[Ll]icense\p{Zs}*:\p{Zs}*(.+)$`)
rxContact = regexp.MustCompile(`[Cc]ontact\p{Zs}*-?(?:[Ii]info\p{Zs}*)?:\p{Zs}*(.+)$`)
rxTOS = regexp.MustCompile(`[Tt](:?erms)?\p{Zs}*-?[Oo]f?\p{Zs}*-?[Ss](?:ervice)?\p{Zs}*:`)
rxExtensions = regexp.MustCompile(`[Ee]xtensions\p{Zs}*:`)
rxInfoExtensions = regexp.MustCompile(`[Ii]nfo\p{Zs}*[Ee]xtensions:`)
// currently unused: rxExample = regexp.MustCompile(`[Ex]ample\p{Zs}*:\p{Zs}*(.*)$`)
)
// Many thanks go to https://github.com/yvasiyarov/swagger
// this is loosely based on that implementation but for swagger 2.0
func joinDropLast(lines []string) string {
l := len(lines)
lns := lines
if l > 0 && len(strings.TrimSpace(lines[l-1])) == 0 {
lns = lines[:l-1]
}
return strings.Join(lns, "\n")
}
func removeEmptyLines(lines []string) (notEmpty []string) {
for _, l := range lines {
if len(strings.TrimSpace(l)) > 0 {
notEmpty = append(notEmpty, l)
}
}
return
}
func rxf(rxp, ar string) *regexp.Regexp {
return regexp.MustCompile(fmt.Sprintf(rxp, ar))
}
// The Opts for the application scanner.
type Opts struct {
BasePath string
Input *spec.Swagger
ScanModels bool
BuildTags string
Include []string
Exclude []string
IncludeTags []string
ExcludeTags []string
}
func safeConvert(str string) bool {
b, err := swag.ConvertBool(str)
if err != nil {
return false
}
return b
}
// Debug is true when the process is run with the DEBUG=1 env var
var Debug = safeConvert(os.Getenv("DEBUG"))
// Application scans the application and builds a swagger spec based on the information in the code files.
// When includes are provided, only those packages are considered for the initial discovery of annotations.
// Similarly, excludes remove packages from that initial discovery.
// When something in the discovered items requires a type that lives in an included or excluded package,
// that type will still end up in the spec.
func Application(opts Opts) (*spec.Swagger, error) {
parser, err := newAppScanner(&opts)
if err != nil {
return nil, err
}
return parser.Parse()
}
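// exampleApplicationUsage is an illustrative sketch and not part of the upstream
// go-swagger sources; it shows how a caller might drive Application. The
// BasePath and tag values below are hypothetical.
func exampleApplicationUsage() (*spec.Swagger, error) {
	opts := Opts{
		BasePath:    "github.com/example/api-server", // hypothetical entrypoint package
		ScanModels:  true,                            // also scan swagger:model declarations
		IncludeTags: []string{"users"},               // keep only operations tagged "users"
	}
	return Application(opts)
}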
// appScanner is the global context for scanning a Go application
// into a swagger specification
type appScanner struct {
loader *loader.Config
prog *loader.Program
classifier *programClassifier
discovered []schemaDecl
input *spec.Swagger
definitions map[string]spec.Schema
responses map[string]spec.Response
operations map[string]*spec.Operation
scanModels bool
includeTags map[string]bool
excludeTags map[string]bool
// MainPackage the path to find the main class in
MainPackage string
}
// newAppScanner creates a new api parser
func newAppScanner(opts *Opts) (*appScanner, error) {
if Debug {
log.Println("scanning packages discovered through entrypoint @ ", opts.BasePath)
}
var ldr loader.Config
ldr.ParserMode = goparser.ParseComments
ldr.Import(opts.BasePath)
if opts.BuildTags != "" {
ldr.Build = &build.Default
ldr.Build.BuildTags = strings.Split(opts.BuildTags, ",")
}
ldr.TypeChecker = types.Config{FakeImportC: true}
prog, err := ldr.Load()
if err != nil {
return nil, err
}
var includes, excludes packageFilters
if len(opts.Include) > 0 {
for _, include := range opts.Include {
includes = append(includes, packageFilter{Name: include})
}
}
if len(opts.Exclude) > 0 {
for _, exclude := range opts.Exclude {
excludes = append(excludes, packageFilter{Name: exclude})
}
}
includeTags := make(map[string]bool)
for _, includeTag := range opts.IncludeTags {
includeTags[includeTag] = true
}
excludeTags := make(map[string]bool)
for _, excludeTag := range opts.ExcludeTags {
excludeTags[excludeTag] = true
}
input := opts.Input
if input == nil {
input = new(spec.Swagger)
input.Swagger = "2.0"
}
if input.Paths == nil {
input.Paths = new(spec.Paths)
}
if input.Definitions == nil {
input.Definitions = make(map[string]spec.Schema)
}
if input.Responses == nil {
input.Responses = make(map[string]spec.Response)
}
if input.Extensions == nil {
input.Extensions = make(spec.Extensions)
}
return &appScanner{
MainPackage: opts.BasePath,
prog: prog,
input: input,
loader: &ldr,
operations: collectOperationsFromInput(input),
definitions: input.Definitions,
responses: input.Responses,
scanModels: opts.ScanModels,
classifier: &programClassifier{
Includes: includes,
Excludes: excludes,
},
includeTags: includeTags,
excludeTags: excludeTags,
}, nil
}
func collectOperationsFromInput(input *spec.Swagger) map[string]*spec.Operation {
operations := make(map[string]*spec.Operation)
if input != nil && input.Paths != nil {
for _, pth := range input.Paths.Paths {
if pth.Get != nil {
operations[pth.Get.ID] = pth.Get
}
if pth.Post != nil {
operations[pth.Post.ID] = pth.Post
}
if pth.Put != nil {
operations[pth.Put.ID] = pth.Put
}
if pth.Patch != nil {
operations[pth.Patch.ID] = pth.Patch
}
if pth.Delete != nil {
operations[pth.Delete.ID] = pth.Delete
}
if pth.Head != nil {
operations[pth.Head.ID] = pth.Head
}
if pth.Options != nil {
operations[pth.Options.ID] = pth.Options
}
}
}
return operations
}
// Parse produces a swagger object for an application
func (a *appScanner) Parse() (*spec.Swagger, error) {
// classification still includes files that are completely commented out
cp, err := a.classifier.Classify(a.prog)
if err != nil {
return nil, err
}
// build models dictionary
if a.scanModels {
for _, modelsFile := range cp.Models {
if err := a.parseSchema(modelsFile); err != nil {
return nil, err
}
}
}
// build parameters dictionary
for _, paramsFile := range cp.Parameters {
if err := a.parseParameters(paramsFile); err != nil {
return nil, err
}
}
// build responses dictionary
for _, responseFile := range cp.Responses {
if err := a.parseResponses(responseFile); err != nil {
return nil, err
}
}
// build definitions dictionary
if err := a.processDiscovered(); err != nil {
return nil, err
}
// build paths dictionary
for _, routeFile := range cp.Routes {
if err := a.parseRoutes(routeFile); err != nil {
return nil, err
}
}
for _, operationFile := range cp.Operations {
if err := a.parseOperations(operationFile); err != nil {
return nil, err
}
}
// build swagger object
for _, metaFile := range cp.Meta {
if err := a.parseMeta(metaFile); err != nil {
return nil, err
}
}
if a.input.Swagger == "" {
a.input.Swagger = "2.0"
}
return a.input, nil
}
func (a *appScanner) processDiscovered() error {
// loop over discovered until all the items are in definitions
keepGoing := len(a.discovered) > 0
for keepGoing {
var queue []schemaDecl
for _, d := range a.discovered {
if _, ok := a.definitions[d.Name]; !ok {
queue = append(queue, d)
}
}
a.discovered = nil
for _, sd := range queue {
if err := a.parseDiscoveredSchema(sd); err != nil {
return err
}
}
keepGoing = len(a.discovered) > 0
}
return nil
}
func (a *appScanner) parseSchema(file *ast.File) error {
sp := newSchemaParser(a.prog)
if err := sp.Parse(file, a.definitions); err != nil {
return err
}
a.discovered = append(a.discovered, sp.postDecls...)
return nil
}
func (a *appScanner) parseDiscoveredSchema(sd schemaDecl) error {
sp := newSchemaParser(a.prog)
sp.discovered = &sd
if err := sp.Parse(sd.File, a.definitions); err != nil {
return err
}
a.discovered = append(a.discovered, sp.postDecls...)
return nil
}
func (a *appScanner) parseRoutes(file *ast.File) error {
rp := newRoutesParser(a.prog)
rp.operations = a.operations
rp.definitions = a.definitions
rp.responses = a.responses
return rp.Parse(file, a.input.Paths, a.includeTags, a.excludeTags)
}
func (a *appScanner) parseOperations(file *ast.File) error {
op := newOperationsParser(a.prog)
op.operations = a.operations
op.definitions = a.definitions
op.responses = a.responses
return op.Parse(file, a.input.Paths, a.includeTags, a.excludeTags)
}
func (a *appScanner) parseParameters(file *ast.File) error {
rp := newParameterParser(a.prog)
if err := rp.Parse(file, a.operations); err != nil {
return err
}
a.discovered = append(a.discovered, rp.postDecls...)
a.discovered = append(a.discovered, rp.scp.postDecls...)
return nil
}
func (a *appScanner) parseResponses(file *ast.File) error {
rp := newResponseParser(a.prog)
if err := rp.Parse(file, a.responses); err != nil {
return err
}
a.discovered = append(a.discovered, rp.postDecls...)
a.discovered = append(a.discovered, rp.scp.postDecls...)
return nil
}
func (a *appScanner) parseMeta(file *ast.File) error {
return newMetaParser(a.input).Parse(file.Doc)
}
// MustExpandPackagePath gets the real package path on disk
func (a *appScanner) MustExpandPackagePath(packagePath string) string {
pkgRealpath := swag.FindInGoSearchPath(packagePath)
if pkgRealpath == "" {
log.Fatalf("Can't find package %s \n", packagePath)
}
return pkgRealpath
}
type swaggerTypable interface {
Typed(string, string)
SetRef(spec.Ref)
Items() swaggerTypable
Schema() *spec.Schema
Level() int
}
// Map all Go builtin types that have a JSON representation to Swagger/JSON types.
// See https://golang.org/pkg/builtin/ and http://swagger.io/specification/
func swaggerSchemaForType(typeName string, prop swaggerTypable) error {
switch typeName {
case "bool":
prop.Typed("boolean", "")
case "byte":
prop.Typed("integer", "uint8")
case "complex128", "complex64":
return fmt.Errorf("unsupported builtin %q (no JSON marshaller)", typeName)
case "error":
// TODO: error is often marshalled into a string, but not always (e.g. the errors package creates
// errors that are marshalled into an empty object); this could be handled the same way
// custom JSON marshallers are handled (in the future)
prop.Typed("string", "")
case "float32":
prop.Typed("number", "float")
case "float64":
prop.Typed("number", "double")
case "int":
prop.Typed("integer", "int64")
case "int16":
prop.Typed("integer", "int16")
case "int32":
prop.Typed("integer", "int32")
case "int64":
prop.Typed("integer", "int64")
case "int8":
prop.Typed("integer", "int8")
case "rune":
prop.Typed("integer", "int32")
case "string":
prop.Typed("string", "")
case "uint":
prop.Typed("integer", "uint64")
case "uint16":
prop.Typed("integer", "uint16")
case "uint32":
prop.Typed("integer", "uint32")
case "uint64":
prop.Typed("integer", "uint64")
case "uint8":
prop.Typed("integer", "uint8")
case "uintptr":
prop.Typed("integer", "uint64")
default:
return fmt.Errorf("unsupported type %q", typeName)
}
return nil
}
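// Illustrative mapping only (hypothetical fields, not part of the upstream
// sources): a struct field declared as `Age int32` goes through the switch
// above as Typed("integer", "int32"), while `Price float64` becomes
// Typed("number", "double") and `Name string` becomes Typed("string", "").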
func newMultiLineTagParser(name string, parser valueParser, skipCleanUp bool) tagParser {
return tagParser{
Name: name,
MultiLine: true,
SkipCleanUp: skipCleanUp,
Parser: parser,
}
}
func newSingleLineTagParser(name string, parser valueParser) tagParser {
return tagParser{
Name: name,
MultiLine: false,
SkipCleanUp: false,
Parser: parser,
}
}
type tagParser struct {
Name string
MultiLine bool
SkipCleanUp bool
Lines []string
Parser valueParser
}
func (st *tagParser) Matches(line string) bool {
return st.Parser.Matches(line)
}
func (st *tagParser) Parse(lines []string) error {
return st.Parser.Parse(lines)
}
func newYamlParser(rx *regexp.Regexp, setter func(json.RawMessage) error) valueParser {
return &yamlParser{
set: setter,
rx: rx,
}
}
type yamlParser struct {
set func(json.RawMessage) error
rx *regexp.Regexp
}
func (y *yamlParser) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
var uncommented []string
uncommented = append(uncommented, removeYamlIndent(lines)...)
yamlContent := strings.Join(uncommented, "\n")
var yamlValue interface{}
err := yaml.Unmarshal([]byte(yamlContent), &yamlValue)
if err != nil {
return err
}
var jsonValue json.RawMessage
jsonValue, err = fmts.YAMLToJSON(yamlValue)
if err != nil {
return err
}
return y.set(jsonValue)
}
func (y *yamlParser) Matches(line string) bool {
return y.rx.MatchString(line)
}
// aggregates lines in header until it sees `---`,
// the beginning of a YAML spec
type yamlSpecScanner struct {
header []string
yamlSpec []string
setTitle func([]string)
setDescription func([]string)
workedOutTitle bool
title []string
skipHeader bool
}
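// For illustration only (hypothetical operation, not part of the upstream
// sources): the scanner above keeps collecting header lines until it meets the
// `---` marker and then treats everything after it as a raw YAML spec.
//
//	// swagger:operation GET /users/{id} users getUserByID
//	//
//	// Gets a user by its ID.
//	//
//	// ---
//	// parameters:
//	// - name: id
//	//   in: path
//	//   required: true
//	//   type: integer
//	// responses:
//	//   "200":
//	//     description: the requested user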
func cleanupScannerLines(lines []string, ur *regexp.Regexp, yamlBlock *regexp.Regexp) []string {
// bail early when there is nothing to parse
if len(lines) == 0 {
return lines
}
seenLine := -1
var lastContent int
var uncommented []string
var startBlock bool
var yaml []string
for i, v := range lines {
if yamlBlock != nil && yamlBlock.MatchString(v) && !startBlock {
startBlock = true
if seenLine < 0 {
seenLine = i
}
continue
}
if startBlock {
if yamlBlock.MatchString(v) {
startBlock = false
uncommented = append(uncommented, removeIndent(yaml)...)
continue
}
yaml = append(yaml, v)
if v != "" {
if seenLine < 0 {
seenLine = i
}
lastContent = i
}
continue
}
str := ur.ReplaceAllString(v, "")
uncommented = append(uncommented, str)
if str != "" {
if seenLine < 0 {
seenLine = i
}
lastContent = i
}
}
// fixes issue #50
if seenLine == -1 {
return nil
}
return uncommented[seenLine : lastContent+1]
}
// a shared function that can be used to split given headers
// into a title and description
func collectScannerTitleDescription(headers []string) (title, desc []string) {
hdrs := cleanupScannerLines(headers, rxUncommentHeaders, nil)
idx := -1
for i, line := range hdrs {
if strings.TrimSpace(line) == "" {
idx = i
break
}
}
if idx > -1 {
title = hdrs[:idx]
if len(hdrs) > idx+1 {
desc = hdrs[idx+1:]
} else {
desc = nil
}
return
}
if len(hdrs) > 0 {
line := hdrs[0]
if rxPunctuationEnd.MatchString(line) {
title = []string{line}
desc = hdrs[1:]
} else {
desc = hdrs
}
}
return
}
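// Illustrative split (hypothetical header lines, not part of the upstream
// sources): for the cleaned-up lines ["Gets a user by ID", "", "Returns the
// user encoded as JSON."], the blank line makes the first line the title and
// the remainder the description; without a blank line, a first line ending in
// punctuation is still promoted to the title.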
func (sp *yamlSpecScanner) collectTitleDescription() {
if sp.workedOutTitle {
return
}
if sp.setTitle == nil {
sp.header = cleanupScannerLines(sp.header, rxUncommentHeaders, nil)
return
}
sp.workedOutTitle = true
sp.title, sp.header = collectScannerTitleDescription(sp.header)
}
func (sp *yamlSpecScanner) Title() []string {
sp.collectTitleDescription()
return sp.title
}
func (sp *yamlSpecScanner) Description() []string {
sp.collectTitleDescription()
return sp.header
}
func (sp *yamlSpecScanner) Parse(doc *ast.CommentGroup) error {
if doc == nil {
return nil
}
var startedYAMLSpec bool
COMMENTS:
for _, c := range doc.List {
for _, line := range strings.Split(c.Text, "\n") {
if rxSwaggerAnnotation.MatchString(line) {
break COMMENTS // a new swagger: annotation terminates this parser
}
if !startedYAMLSpec {
if rxBeginYAMLSpec.MatchString(line) {
startedYAMLSpec = true
sp.yamlSpec = append(sp.yamlSpec, line)
continue
}
if !sp.skipHeader {
sp.header = append(sp.header, line)
}
// no YAML spec yet, moving on
continue
}
sp.yamlSpec = append(sp.yamlSpec, line)
}
}
if sp.setTitle != nil {
sp.setTitle(sp.Title())
}
if sp.setDescription != nil {
sp.setDescription(sp.Description())
}
return nil
}
func (sp *yamlSpecScanner) UnmarshalSpec(u func([]byte) error) (err error) {
spec := cleanupScannerLines(sp.yamlSpec, rxUncommentYAML, nil)
if len(spec) == 0 {
return errors.New("no spec available to unmarshal")
}
if !strings.Contains(spec[0], "---") {
return errors.New("yaml spec has to start with `---`")
}
// remove indentation
spec = removeIndent(spec)
// 1. parse yaml lines
yamlValue := make(map[interface{}]interface{})
yamlContent := strings.Join(spec, "\n")
err = yaml.Unmarshal([]byte(yamlContent), &yamlValue)
if err != nil {
return
}
// 2. convert to json
var jsonValue json.RawMessage
jsonValue, err = fmts.YAMLToJSON(yamlValue)
if err != nil {
return
}
// 3. unmarshal the json into an interface
var data []byte
data, err = jsonValue.MarshalJSON()
if err != nil {
return
}
err = u(data)
if err != nil {
return
}
// all parsed, returning...
sp.yamlSpec = nil // spec is now consumed, so let's erase the parsed lines
return
}
// removes indent based on the first line
func removeIndent(spec []string) []string {
loc := rxIndent.FindStringIndex(spec[0])
if loc[1] > 0 {
for i := range spec {
if len(spec[i]) >= loc[1] {
spec[i] = spec[i][loc[1]-1:]
}
}
}
return spec
}
// removes indent based on the first line
func removeYamlIndent(spec []string) []string {
loc := rxIndent.FindStringIndex(spec[0])
var s []string
if loc[1] > 0 {
for i := range spec {
if len(spec[i]) >= loc[1] {
s = append(s, spec[i][loc[1]-1:])
}
}
}
return s
}
// aggregates lines in header until it sees a tag.
type sectionedParser struct {
header []string
matched map[string]tagParser
annotation valueParser
seenTag bool
skipHeader bool
setTitle func([]string)
setDescription func([]string)
workedOutTitle bool
taggers []tagParser
currentTagger *tagParser
title []string
ignored bool
}
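// Sketch of how a doc comment flows through this parser (illustrative):
// lines seen before any tagger matches become the header, which is later split
// into title and description; once a tagger such as "Responses:" matches, the
// following lines are collected for that tagger until another tagger or a new
// swagger: annotation is encountered, and each collected block is then handed
// to its tagger's Parse.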
func (st *sectionedParser) collectTitleDescription() {
if st.workedOutTitle {
return
}
if st.setTitle == nil {
st.header = cleanupScannerLines(st.header, rxUncommentHeaders, nil)
return
}
st.workedOutTitle = true
st.title, st.header = collectScannerTitleDescription(st.header)
}
func (st *sectionedParser) Title() []string {
st.collectTitleDescription()
return st.title
}
func (st *sectionedParser) Description() []string {
st.collectTitleDescription()
return st.header
}
func (st *sectionedParser) Parse(doc *ast.CommentGroup) error {
if doc == nil {
return nil
}
COMMENTS:
for _, c := range doc.List {
for _, line := range strings.Split(c.Text, "\n") {
if rxSwaggerAnnotation.MatchString(line) {
if rxIgnoreOverride.MatchString(line) {
st.ignored = true
break COMMENTS // an explicit ignore terminates this parser
}
if st.annotation == nil || !st.annotation.Matches(line) {
break COMMENTS // a new swagger: annotation terminates this parser
}
_ = st.annotation.Parse([]string{line})
if len(st.header) > 0 {
st.seenTag = true
}
continue
}
var matched bool
for _, tagger := range st.taggers {
if tagger.Matches(line) {
st.seenTag = true
st.currentTagger = &tagger
matched = true
break
}
}
if st.currentTagger == nil {
if !st.skipHeader && !st.seenTag {
st.header = append(st.header, line)
}
// didn't match a tag, moving on
continue
}
if st.currentTagger.MultiLine && matched {
// the first line of a multiline tagger doesn't count
continue
}
ts, ok := st.matched[st.currentTagger.Name]
if !ok {
ts = *st.currentTagger
}
ts.Lines = append(ts.Lines, line)
if st.matched == nil {
st.matched = make(map[string]tagParser)
}
st.matched[st.currentTagger.Name] = ts
if !st.currentTagger.MultiLine {
st.currentTagger = nil
}
}
}
if st.setTitle != nil {
st.setTitle(st.Title())
}
if st.setDescription != nil {
st.setDescription(st.Description())
}
for _, mt := range st.matched {
if !mt.SkipCleanUp {
mt.Lines = cleanupScannerLines(mt.Lines, rxUncommentHeaders, nil)
}
if err := mt.Parse(mt.Lines); err != nil {
return err
}
}
return nil
}

1336
vendor/github.com/go-swagger/go-swagger/scan/schema.go generated vendored Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,828 @@
// +build !go1.11
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scan
import (
"encoding/json"
"fmt"
"regexp"
"strconv"
"strings"
"github.com/go-openapi/spec"
)
type validationBuilder interface {
SetMaximum(float64, bool)
SetMinimum(float64, bool)
SetMultipleOf(float64)
SetMinItems(int64)
SetMaxItems(int64)
SetMinLength(int64)
SetMaxLength(int64)
SetPattern(string)
SetUnique(bool)
SetEnum(string)
SetDefault(interface{})
SetExample(interface{})
}
type valueParser interface {
Parse([]string) error
Matches(string) bool
}
type setMaximum struct {
builder validationBuilder
rx *regexp.Regexp
}
func (sm *setMaximum) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := sm.rx.FindStringSubmatch(lines[0])
if len(matches) > 2 && len(matches[2]) > 0 {
max, err := strconv.ParseFloat(matches[2], 64)
if err != nil {
return err
}
sm.builder.SetMaximum(max, matches[1] == "<")
}
return nil
}
func (sm *setMaximum) Matches(line string) bool {
return sm.rx.MatchString(line)
}
type setMinimum struct {
builder validationBuilder
rx *regexp.Regexp
}
func (sm *setMinimum) Matches(line string) bool {
return sm.rx.MatchString(line)
}
func (sm *setMinimum) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := sm.rx.FindStringSubmatch(lines[0])
if len(matches) > 2 && len(matches[2]) > 0 {
min, err := strconv.ParseFloat(matches[2], 64)
if err != nil {
return err
}
sm.builder.SetMinimum(min, matches[1] == ">")
}
return nil
}
type setMultipleOf struct {
builder validationBuilder
rx *regexp.Regexp
}
func (sm *setMultipleOf) Matches(line string) bool {
return sm.rx.MatchString(line)
}
func (sm *setMultipleOf) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := sm.rx.FindStringSubmatch(lines[0])
if len(matches) > 2 && len(matches[1]) > 0 {
multipleOf, err := strconv.ParseFloat(matches[1], 64)
if err != nil {
return err
}
sm.builder.SetMultipleOf(multipleOf)
}
return nil
}
type setMaxItems struct {
builder validationBuilder
rx *regexp.Regexp
}
func (sm *setMaxItems) Matches(line string) bool {
return sm.rx.MatchString(line)
}
func (sm *setMaxItems) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := sm.rx.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
maxItems, err := strconv.ParseInt(matches[1], 10, 64)
if err != nil {
return err
}
sm.builder.SetMaxItems(maxItems)
}
return nil
}
type setMinItems struct {
builder validationBuilder
rx *regexp.Regexp
}
func (sm *setMinItems) Matches(line string) bool {
return sm.rx.MatchString(line)
}
func (sm *setMinItems) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := sm.rx.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
minItems, err := strconv.ParseInt(matches[1], 10, 64)
if err != nil {
return err
}
sm.builder.SetMinItems(minItems)
}
return nil
}
type setMaxLength struct {
builder validationBuilder
rx *regexp.Regexp
}
func (sm *setMaxLength) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := sm.rx.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
maxLength, err := strconv.ParseInt(matches[1], 10, 64)
if err != nil {
return err
}
sm.builder.SetMaxLength(maxLength)
}
return nil
}
func (sm *setMaxLength) Matches(line string) bool {
return sm.rx.MatchString(line)
}
type setMinLength struct {
builder validationBuilder
rx *regexp.Regexp
}
func (sm *setMinLength) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := sm.rx.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
minLength, err := strconv.ParseInt(matches[1], 10, 64)
if err != nil {
return err
}
sm.builder.SetMinLength(minLength)
}
return nil
}
func (sm *setMinLength) Matches(line string) bool {
return sm.rx.MatchString(line)
}
type setPattern struct {
builder validationBuilder
rx *regexp.Regexp
}
func (sm *setPattern) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := sm.rx.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
sm.builder.SetPattern(matches[1])
}
return nil
}
func (sm *setPattern) Matches(line string) bool {
return sm.rx.MatchString(line)
}
type setCollectionFormat struct {
builder operationValidationBuilder
rx *regexp.Regexp
}
func (sm *setCollectionFormat) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := sm.rx.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
sm.builder.SetCollectionFormat(matches[1])
}
return nil
}
func (sm *setCollectionFormat) Matches(line string) bool {
return sm.rx.MatchString(line)
}
type setUnique struct {
builder validationBuilder
rx *regexp.Regexp
}
func (su *setUnique) Matches(line string) bool {
return su.rx.MatchString(line)
}
func (su *setUnique) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := su.rx.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
req, err := strconv.ParseBool(matches[1])
if err != nil {
return err
}
su.builder.SetUnique(req)
}
return nil
}
type setEnum struct {
builder validationBuilder
rx *regexp.Regexp
}
func (se *setEnum) Matches(line string) bool {
return se.rx.MatchString(line)
}
func (se *setEnum) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := se.rx.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
se.builder.SetEnum(matches[1])
}
return nil
}
func parseValueFromSchema(s string, schema *spec.SimpleSchema) (interface{}, error) {
if schema != nil {
switch strings.Trim(schema.TypeName(), "\"") {
case "integer", "int", "int64", "int32", "int16":
return strconv.Atoi(s)
case "bool", "boolean":
return strconv.ParseBool(s)
case "number", "float64", "float32":
return strconv.ParseFloat(s, 64)
case "object":
var obj map[string]interface{}
if err := json.Unmarshal([]byte(s), &obj); err != nil {
// If we can't parse it, just return the string.
return s, nil
}
return obj, nil
case "array":
var slice []interface{}
if err := json.Unmarshal([]byte(s), &slice); err != nil {
// If we can't parse it, just return the string.
return s, nil
}
return slice, nil
default:
return s, nil
}
} else {
return s, nil
}
}
type setDefault struct {
scheme *spec.SimpleSchema
builder validationBuilder
rx *regexp.Regexp
}
func (sd *setDefault) Matches(line string) bool {
return sd.rx.MatchString(line)
}
func (sd *setDefault) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := sd.rx.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
d, err := parseValueFromSchema(matches[1], sd.scheme)
if err != nil {
return err
}
sd.builder.SetDefault(d)
}
return nil
}
type setExample struct {
scheme *spec.SimpleSchema
builder validationBuilder
rx *regexp.Regexp
}
func (se *setExample) Matches(line string) bool {
return se.rx.MatchString(line)
}
func (se *setExample) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := se.rx.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
d, err := parseValueFromSchema(matches[1], se.scheme)
if err != nil {
return err
}
se.builder.SetExample(d)
}
return nil
}
type matchOnlyParam struct {
tgt *spec.Parameter
rx *regexp.Regexp
}
func (mo *matchOnlyParam) Matches(line string) bool {
return mo.rx.MatchString(line)
}
func (mo *matchOnlyParam) Parse(lines []string) error {
return nil
}
type setRequiredParam struct {
tgt *spec.Parameter
}
func (su *setRequiredParam) Matches(line string) bool {
return rxRequired.MatchString(line)
}
func (su *setRequiredParam) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := rxRequired.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
req, err := strconv.ParseBool(matches[1])
if err != nil {
return err
}
su.tgt.Required = req
}
return nil
}
type setReadOnlySchema struct {
tgt *spec.Schema
}
func (su *setReadOnlySchema) Matches(line string) bool {
return rxReadOnly.MatchString(line)
}
func (su *setReadOnlySchema) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := rxReadOnly.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
req, err := strconv.ParseBool(matches[1])
if err != nil {
return err
}
su.tgt.ReadOnly = req
}
return nil
}
type setDiscriminator struct {
schema *spec.Schema
field string
}
func (su *setDiscriminator) Matches(line string) bool {
return rxDiscriminator.MatchString(line)
}
func (su *setDiscriminator) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := rxDiscriminator.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
req, err := strconv.ParseBool(matches[1])
if err != nil {
return err
}
if req {
su.schema.Discriminator = su.field
} else {
if su.schema.Discriminator == su.field {
su.schema.Discriminator = ""
}
}
}
return nil
}
type setRequiredSchema struct {
schema *spec.Schema
field string
}
func (su *setRequiredSchema) Matches(line string) bool {
return rxRequired.MatchString(line)
}
func (su *setRequiredSchema) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := rxRequired.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
req, err := strconv.ParseBool(matches[1])
if err != nil {
return err
}
midx := -1
for i, nm := range su.schema.Required {
if nm == su.field {
midx = i
break
}
}
if req {
if midx < 0 {
su.schema.Required = append(su.schema.Required, su.field)
}
} else if midx >= 0 {
su.schema.Required = append(su.schema.Required[:midx], su.schema.Required[midx+1:]...)
}
}
return nil
}
func newMultilineDropEmptyParser(rx *regexp.Regexp, set func([]string)) *multiLineDropEmptyParser {
return &multiLineDropEmptyParser{
rx: rx,
set: set,
}
}
type multiLineDropEmptyParser struct {
set func([]string)
rx *regexp.Regexp
}
func (m *multiLineDropEmptyParser) Matches(line string) bool {
return m.rx.MatchString(line)
}
func (m *multiLineDropEmptyParser) Parse(lines []string) error {
m.set(removeEmptyLines(lines))
return nil
}
func newSetSchemes(set func([]string)) *setSchemes {
return &setSchemes{
set: set,
rx: rxSchemes,
}
}
type setSchemes struct {
set func([]string)
rx *regexp.Regexp
}
func (ss *setSchemes) Matches(line string) bool {
return ss.rx.MatchString(line)
}
func (ss *setSchemes) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
matches := ss.rx.FindStringSubmatch(lines[0])
if len(matches) > 1 && len(matches[1]) > 0 {
sch := strings.Split(matches[1], ", ")
var schemes []string
for _, s := range sch {
ts := strings.TrimSpace(s)
if ts != "" {
schemes = append(schemes, ts)
}
}
ss.set(schemes)
}
return nil
}
func newSetSecurity(rx *regexp.Regexp, setter func([]map[string][]string)) *setSecurity {
return &setSecurity{
set: setter,
rx: rx,
}
}
type setSecurity struct {
set func([]map[string][]string)
rx *regexp.Regexp
}
func (ss *setSecurity) Matches(line string) bool {
return ss.rx.MatchString(line)
}
func (ss *setSecurity) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
var result []map[string][]string
for _, line := range lines {
kv := strings.SplitN(line, ":", 2)
scopes := []string{}
var key string
if len(kv) > 1 {
scs := strings.Split(kv[1], ",")
for _, scope := range scs {
tr := strings.TrimSpace(scope)
if tr != "" {
tr = strings.SplitAfter(tr, " ")[0]
scopes = append(scopes, strings.TrimSpace(tr))
}
}
key = strings.TrimSpace(kv[0])
result = append(result, map[string][]string{key: scopes})
}
}
ss.set(result)
return nil
}
func newSetResponses(definitions map[string]spec.Schema, responses map[string]spec.Response, setter func(*spec.Response, map[int]spec.Response)) *setOpResponses {
return &setOpResponses{
set: setter,
rx: rxResponses,
definitions: definitions,
responses: responses,
}
}
type setOpResponses struct {
set func(*spec.Response, map[int]spec.Response)
rx *regexp.Regexp
definitions map[string]spec.Schema
responses map[string]spec.Response
}
func (ss *setOpResponses) Matches(line string) bool {
return ss.rx.MatchString(line)
}
// ResponseTag is used when specifying a response to point to a defined swagger:response
const ResponseTag = "response"
// BodyTag is used when specifying a response to point to a model/schema
const BodyTag = "body"
// DescriptionTag is used when specifying a response that gives a description of the response
const DescriptionTag = "description"
func parseTags(line string) (modelOrResponse string, arrays int, isDefinitionRef bool, description string, err error) {
tags := strings.Split(line, " ")
parsedModelOrResponse := false
for i, tagAndValue := range tags {
tagValList := strings.SplitN(tagAndValue, ":", 2)
var tag, value string
if len(tagValList) > 1 {
tag = tagValList[0]
value = tagValList[1]
} else {
// TODO: print a warning and, in the long term, stop supporting untagged values.
// Add a default tag if none is supplied.
if i == 0 {
tag = ResponseTag
} else {
tag = DescriptionTag
}
value = tagValList[0]
}
foundModelOrResponse := false
if !parsedModelOrResponse {
if tag == BodyTag {
foundModelOrResponse = true
isDefinitionRef = true
}
if tag == ResponseTag {
foundModelOrResponse = true
isDefinitionRef = false
}
}
if foundModelOrResponse {
// Read the model or response tag
parsedModelOrResponse = true
// Check for nested arrays
arrays = 0
for strings.HasPrefix(value, "[]") {
arrays++
value = value[2:]
}
// What's left over is the model name
modelOrResponse = value
} else {
foundDescription := false
if tag == DescriptionTag {
foundDescription = true
}
if foundDescription {
// Descriptions are special: they consume the rest of the line
descriptionWords := []string{value}
if i < len(tags)-1 {
descriptionWords = append(descriptionWords, tags[i+1:]...)
}
description = strings.Join(descriptionWords, " ")
break
} else {
if tag == ResponseTag || tag == BodyTag || tag == DescriptionTag {
err = fmt.Errorf("Found valid tag %s, but not in a valid position", tag)
} else {
err = fmt.Errorf("Found invalid tag: %s", tag)
}
//return error
return
}
}
}
// TODO: maybe return an error if !parsedModelOrResponse
return
}
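// Illustrative input (hypothetical names, not part of the upstream sources):
// for the value "body:[]User the list of users", parseTags reports
// modelOrResponse "User", arrays 1, isDefinitionRef true and description
// "the list of users"; a bare "notFoundResponse" (as in "404: notFoundResponse")
// is treated as a reference to a named swagger:response instead.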
func (ss *setOpResponses) Parse(lines []string) error {
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
return nil
}
var def *spec.Response
var scr map[int]spec.Response
for _, line := range lines {
kv := strings.SplitN(line, ":", 2)
var key, value string
if len(kv) > 1 {
key = strings.TrimSpace(kv[0])
if key == "" {
// this must be some weird empty line
continue
}
value = strings.TrimSpace(kv[1])
if value == "" {
var resp spec.Response
if strings.EqualFold("default", key) {
if def == nil {
def = &resp
}
} else {
if sc, err := strconv.Atoi(key); err == nil {
if scr == nil {
scr = make(map[int]spec.Response)
}
scr[sc] = resp
}
}
continue
}
refTarget, arrays, isDefinitionRef, description, err := parseTags(value)
if err != nil {
return err
}
// If the ref target is not a known response but is a known definition, treat it as a definition ref
if _, ok := ss.responses[refTarget]; !ok {
if _, ok := ss.definitions[refTarget]; ok {
isDefinitionRef = true
}
}
var ref spec.Ref
if isDefinitionRef {
if description == "" {
description = refTarget
}
ref, err = spec.NewRef("#/definitions/" + refTarget)
} else {
ref, err = spec.NewRef("#/responses/" + refTarget)
}
if err != nil {
return err
}
// the description should be used in any case
resp := spec.Response{ResponseProps: spec.ResponseProps{Description: description}}
if isDefinitionRef {
resp.Schema = new(spec.Schema)
resp.Description = description
if arrays == 0 {
resp.Schema.Ref = ref
} else {
cs := resp.Schema
for i := 0; i < arrays; i++ {
cs.Typed("array", "")
cs.Items = new(spec.SchemaOrArray)
cs.Items.Schema = new(spec.Schema)
cs = cs.Items.Schema
}
cs.Ref = ref
}
// the ref can be empty when only a description tag is used
} else if len(refTarget) > 0 {
resp.Ref = ref
}
if strings.EqualFold("default", key) {
if def == nil {
def = &resp
}
} else {
if sc, err := strconv.Atoi(key); err == nil {
if scr == nil {
scr = make(map[int]spec.Response)
}
scr[sc] = resp
}
}
}
}
ss.set(def, scr)
return nil
}
func parseEnum(val string, s *spec.SimpleSchema) []interface{} {
list := strings.Split(val, ",")
interfaceSlice := make([]interface{}, len(list))
for i, d := range list {
v, err := parseValueFromSchema(d, s)
if err != nil {
interfaceSlice[i] = d
continue
}
interfaceSlice[i] = v
}
return interfaceSlice
}