podman
804 lines · 23.2 KB
1// Copyright 2015 go-swagger maintainers
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15package validate
16
17import (
18"encoding/json"
19"fmt"
20"sort"
21"strings"
22
23"github.com/go-openapi/analysis"
24"github.com/go-openapi/errors"
25"github.com/go-openapi/jsonpointer"
26"github.com/go-openapi/loads"
27"github.com/go-openapi/spec"
28"github.com/go-openapi/strfmt"
29)
30
31// Spec validates an OpenAPI 2.0 specification document.
32//
33// Returns an error flattening in a single standard error, all validation messages.
34//
35// - TODO: $ref should not have siblings
36// - TODO: make sure documentation reflects all checks and warnings
37// - TODO: check on discriminators
38// - TODO: explicit message on unsupported keywords (better than "forbidden property"...)
39// - TODO: full list of unresolved refs
40// - TODO: validate numeric constraints (issue#581): this should be handled like defaults and examples
41// - TODO: option to determine if we validate for go-swagger or in a more general context
42// - TODO: check on required properties to support anyOf, allOf, oneOf
43//
44// NOTE: SecurityScopes are maps: no need to check uniqueness
45//
46func Spec(doc *loads.Document, formats strfmt.Registry) error {
47errs, _ /*warns*/ := NewSpecValidator(doc.Schema(), formats).Validate(doc)
48if errs.HasErrors() {
49return errors.CompositeValidationError(errs.Errors...)
50}
51return nil
52}
53
// SpecValidator validates a swagger 2.0 spec
type SpecValidator struct {
	schema       *spec.Schema   // swagger 2.0 schema against which the raw document is checked
	spec         *loads.Document // the document under validation (set by Validate)
	analyzer     *analysis.Spec  // analyzer built from the non-expanded spec
	expanded     *loads.Document // $ref-expanded document, set by validateReferencesValid when expansion succeeds
	KnownFormats strfmt.Registry // registry of string formats recognized during validation
	Options      Opts            // validation options
}
63
64// NewSpecValidator creates a new swagger spec validator instance
65func NewSpecValidator(schema *spec.Schema, formats strfmt.Registry) *SpecValidator {
66return &SpecValidator{
67schema: schema,
68KnownFormats: formats,
69Options: defaultOpts,
70}
71}
72
// Validate validates the swagger spec.
//
// It returns two results: the first aggregates all errors and warnings,
// the second carries warnings only (see the deferred merge below).
// Validation stops early on errors unless Options.ContinueOnErrors is set.
func (s *SpecValidator) Validate(data interface{}) (*Result, *Result) {
	var sd *loads.Document
	errs, warnings := new(Result), new(Result)

	// only *loads.Document inputs are supported
	if v, ok := data.(*loads.Document); ok {
		sd = v
	}
	if sd == nil {
		errs.AddErrors(invalidDocumentMsg())
		return errs, warnings // no point in continuing
	}
	s.spec = sd
	s.analyzer = analysis.New(sd.Spec())

	// Swagger schema validator
	schv := NewSchemaValidator(s.schema, nil, "", s.KnownFormats, SwaggerSchema(true))
	var obj interface{}

	// Raw spec unmarshalling errors
	if err := json.Unmarshal(sd.Raw(), &obj); err != nil {
		// NOTE: under normal conditions, the *load.Document has been already unmarshalled
		// So this one is just a paranoid check on the behavior of the spec package
		panic(InvalidDocumentError)
	}

	defer func() {
		// errs holds all errors and warnings,
		// warnings only warnings
		errs.MergeAsWarnings(warnings)
		warnings.AddErrors(errs.Warnings...)
	}()

	// structural validation against the swagger 2.0 meta-schema
	errs.Merge(schv.Validate(obj)) // error -
	// There may be a point in continuing to try and determine more accurate errors
	if !s.Options.ContinueOnErrors && errs.HasErrors() {
		return errs, warnings // no point in continuing
	}

	// $ref resolution; on success this also sets s.expanded
	errs.Merge(s.validateReferencesValid()) // error -
	// There may be a point in continuing to try and determine more accurate errors
	if !s.Options.ContinueOnErrors && errs.HasErrors() {
		return errs, warnings // no point in continuing
	}

	errs.Merge(s.validateDuplicateOperationIDs())
	errs.Merge(s.validateDuplicatePropertyNames()) // error -
	errs.Merge(s.validateParameters())             // error -
	errs.Merge(s.validateItems())                  // error -

	// Properties in required definition MUST validate their schema
	// Properties SHOULD NOT be declared as both required and readOnly (warning)
	errs.Merge(s.validateRequiredDefinitions()) // error and warning

	// There may be a point in continuing to try and determine more accurate errors
	if !s.Options.ContinueOnErrors && errs.HasErrors() {
		return errs, warnings // no point in continuing
	}

	// Values provided as default MUST validate their schema
	df := &defaultValidator{SpecValidator: s}
	errs.Merge(df.Validate())

	// Values provided as examples MUST validate their schema
	// Value provided as examples in a response without schema generate a warning
	// Known limitations: examples in responses for mime type not application/json are ignored (warning)
	ex := &exampleValidator{SpecValidator: s}
	errs.Merge(ex.Validate())

	errs.Merge(s.validateNonEmptyPathParamNames())

	// errs.Merge(s.validateRefNoSibling()) // warning only
	errs.Merge(s.validateReferenced()) // warning only

	return errs, warnings
}
149
150func (s *SpecValidator) validateNonEmptyPathParamNames() *Result {
151res := new(Result)
152if s.spec.Spec().Paths == nil {
153// There is no Paths object: error
154res.AddErrors(noValidPathMsg())
155} else {
156if s.spec.Spec().Paths.Paths == nil {
157// Paths may be empty: warning
158res.AddWarnings(noValidPathMsg())
159} else {
160for k := range s.spec.Spec().Paths.Paths {
161if strings.Contains(k, "{}") {
162res.AddErrors(emptyPathParameterMsg(k))
163}
164}
165}
166}
167return res
168}
169
170func (s *SpecValidator) validateDuplicateOperationIDs() *Result {
171// OperationID, if specified, must be unique across the board
172var analyzer *analysis.Spec
173if s.expanded != nil {
174// $ref are valid: we can analyze operations on an expanded spec
175analyzer = analysis.New(s.expanded.Spec())
176} else {
177// fallback on possible incomplete picture because of previous errors
178analyzer = s.analyzer
179}
180res := new(Result)
181known := make(map[string]int)
182for _, v := range analyzer.OperationIDs() {
183if v != "" {
184known[v]++
185}
186}
187for k, v := range known {
188if v > 1 {
189res.AddErrors(nonUniqueOperationIDMsg(k, v))
190}
191}
192return res
193}
194
// dupProp identifies a property declared twice along an allOf ancestry chain:
// Name is the duplicated property, Definition the schema that redeclares it.
type dupProp struct {
	Name       string
	Definition string
}
199
// validateDuplicatePropertyNames checks that a composed definition (allOf)
// does not redeclare a property already defined by one of its ancestors,
// and detects circular ancestry in allOf chains.
func (s *SpecValidator) validateDuplicatePropertyNames() *Result {
	// definition can't declare a property that's already defined by one of its ancestors
	res := new(Result)
	for k, sch := range s.spec.Spec().Definitions {
		// only composed definitions can inherit properties
		if len(sch.AllOf) == 0 {
			continue
		}

		// seed the ancestry set with the definition itself
		knownanc := map[string]struct{}{
			"#/definitions/" + k: {},
		}

		ancs, rec := s.validateCircularAncestry(k, sch, knownanc)
		// NOTE(review): this condition uses !rec.HasWarnings(), while the property-name
		// check below uses rep.HasWarnings() — asymmetric; confirm this is intended
		if rec != nil && (rec.HasErrors() || !rec.HasWarnings()) {
			res.Merge(rec)
		}
		if len(ancs) > 0 {
			// circular ancestry: report and stop, since further property
			// collection would recurse through the cycle
			res.AddErrors(circularAncestryDefinitionMsg(k, ancs))
			return res
		}

		knowns := make(map[string]struct{})
		dups, rep := s.validateSchemaPropertyNames(k, sch, knowns)
		if rep != nil && (rep.HasErrors() || rep.HasWarnings()) {
			res.Merge(rep)
		}
		if len(dups) > 0 {
			// report duplicates as "<ancestor definition>.<property>"
			var pns []string
			for _, v := range dups {
				pns = append(pns, v.Definition+"."+v.Name)
			}
			res.AddErrors(duplicatePropertiesMsg(k, pns))
		}

	}
	return res
}
237
238func (s *SpecValidator) resolveRef(ref *spec.Ref) (*spec.Schema, error) {
239if s.spec.SpecFilePath() != "" {
240return spec.ResolveRefWithBase(s.spec.Spec(), ref, &spec.ExpandOptions{RelativeBase: s.spec.SpecFilePath()})
241}
242// NOTE: it looks like with the new spec resolver, this code is now unrecheable
243return spec.ResolveRef(s.spec.Spec(), ref)
244}
245
// validateSchemaPropertyNames walks a schema (following $ref chains, then
// recursing through allOf members) and reports every property name that was
// already recorded in knowns as a duplicate.
//
// nm is the name or $ref used to attribute duplicates; knowns is mutated to
// accumulate the property names seen so far across the whole ancestry.
func (s *SpecValidator) validateSchemaPropertyNames(nm string, sch spec.Schema, knowns map[string]struct{}) ([]dupProp, *Result) {
	var dups []dupProp

	schn := nm
	schc := &sch
	res := new(Result)

	for schc.Ref.String() != "" {
		// gather property names
		reso, err := s.resolveRef(&schc.Ref)
		if err != nil {
			errorHelp.addPointerError(res, err, schc.Ref.String(), nm)
			return dups, res
		}
		schc = reso
		// NOTE(review): this records sch.Ref (the top-level ref), not schc.Ref, so a
		// chain of $refs always reports the first ref — confirm this is intended
		schn = sch.Ref.String()
	}

	if len(schc.AllOf) > 0 {
		// composed schema: recurse into each member, sharing the knowns set
		for _, chld := range schc.AllOf {
			dup, rep := s.validateSchemaPropertyNames(schn, chld, knowns)
			if rep != nil && (rep.HasErrors() || rep.HasWarnings()) {
				res.Merge(rep)
			}
			dups = append(dups, dup...)
		}
		return dups, res
	}

	// leaf schema: a property already recorded is a duplicate
	for k := range schc.Properties {
		_, ok := knowns[k]
		if ok {
			dups = append(dups, dupProp{Name: k, Definition: schn})
		} else {
			knowns[k] = struct{}{}
		}
	}

	return dups, res
}
286
// validateCircularAncestry detects cycles in a definition's allOf ancestry.
// It follows $ref chains and recurses through allOf members, returning the
// list of ancestors that close a cycle (empty when no cycle is found).
//
// knowns is mutated to record every ancestor name visited so far.
func (s *SpecValidator) validateCircularAncestry(nm string, sch spec.Schema, knowns map[string]struct{}) ([]string, *Result) {
	res := new(Result)

	if sch.Ref.String() == "" && len(sch.AllOf) == 0 { // Safeguard. We should not be able to actually get there
		return nil, res
	}
	var ancs []string

	schn := nm
	schc := &sch

	// follow the $ref chain down to a concrete schema
	for schc.Ref.String() != "" {
		reso, err := s.resolveRef(&schc.Ref)
		if err != nil {
			errorHelp.addPointerError(res, err, schc.Ref.String(), nm)
			return ancs, res
		}
		schc = reso
		// NOTE(review): records sch.Ref (top-level), not schc.Ref — same quirk as
		// validateSchemaPropertyNames; confirm intent
		schn = sch.Ref.String()
	}

	if schn != nm && schn != "" {
		// a name already visited means the ancestry loops back on itself
		if _, ok := knowns[schn]; ok {
			ancs = append(ancs, schn)
		}
		knowns[schn] = struct{}{}

		if len(ancs) > 0 {
			// stop at the first detected cycle
			return ancs, res
		}
	}

	if len(schc.AllOf) > 0 {
		// recurse through composed members that may extend the ancestry
		for _, chld := range schc.AllOf {
			if chld.Ref.String() != "" || len(chld.AllOf) > 0 {
				anc, rec := s.validateCircularAncestry(schn, chld, knowns)
				// NOTE(review): !rec.HasWarnings() mirrors validateDuplicatePropertyNames;
				// confirm the negation is intended
				if rec != nil && (rec.HasErrors() || !rec.HasWarnings()) {
					res.Merge(rec)
				}
				ancs = append(ancs, anc...)
				if len(ancs) > 0 {
					return ancs, res
				}
			}
		}
	}
	return ancs, res
}
335
// validateItems checks that every parameter, header and schema declared with
// type "array" also declares an items definition, across all operations,
// their parameters and their responses.
func (s *SpecValidator) validateItems() *Result {
	// validate parameter, items, schema and response objects for presence of item if type is array
	res := new(Result)

	for method, pi := range s.analyzer.Operations() {
		for path, op := range pi {
			for _, param := range paramHelp.safeExpandedParamsFor(path, method, op.ID, res, s) {

				// top-level array parameter must carry items
				if param.TypeName() == arrayType && param.ItemsTypeName() == "" {
					res.AddErrors(arrayInParamRequiresItemsMsg(param.Name, op.ID))
					continue
				}
				if param.In != swaggerBody {
					// simple (non-body) parameter: walk nested items chains
					if param.Items != nil {
						items := param.Items
						for items.TypeName() == arrayType {
							if items.ItemsTypeName() == "" {
								res.AddErrors(arrayInParamRequiresItemsMsg(param.Name, op.ID))
								break
							}
							items = items.Items
						}
					}
				} else {
					// In: body — validate the attached schema recursively
					if param.Schema != nil {
						res.Merge(s.validateSchemaItems(*param.Schema, fmt.Sprintf("body param %q", param.Name), op.ID))
					}
				}
			}

			// collect default and status-code responses into one list
			var responses []spec.Response
			if op.Responses != nil {
				if op.Responses.Default != nil {
					responses = append(responses, *op.Responses.Default)
				}
				if op.Responses.StatusCodeResponses != nil {
					for _, v := range op.Responses.StatusCodeResponses {
						responses = append(responses, v)
					}
				}
			}

			for _, resp := range responses {
				// Response headers with array type must declare items
				for hn, hv := range resp.Headers {
					if hv.TypeName() == arrayType && hv.ItemsTypeName() == "" {
						res.AddErrors(arrayInHeaderRequiresItemsMsg(hn, op.ID))
					}
				}
				// response body schema validated recursively
				if resp.Schema != nil {
					res.Merge(s.validateSchemaItems(*resp.Schema, "response body", op.ID))
				}
			}
		}
	}
	return res
}
394
395// Verifies constraints on array type
396func (s *SpecValidator) validateSchemaItems(schema spec.Schema, prefix, opID string) *Result {
397res := new(Result)
398if !schema.Type.Contains(arrayType) {
399return res
400}
401
402if schema.Items == nil || schema.Items.Len() == 0 {
403res.AddErrors(arrayRequiresItemsMsg(prefix, opID))
404return res
405}
406
407if schema.Items.Schema != nil {
408schema = *schema.Items.Schema
409if _, err := compileRegexp(schema.Pattern); err != nil {
410res.AddErrors(invalidItemsPatternMsg(prefix, opID, schema.Pattern))
411}
412
413res.Merge(s.validateSchemaItems(schema, prefix, opID))
414}
415return res
416}
417
418func (s *SpecValidator) validatePathParamPresence(path string, fromPath, fromOperation []string) *Result {
419// Each defined operation path parameters must correspond to a named element in the API's path pattern.
420// (For example, you cannot have a path parameter named id for the following path /pets/{petId} but you must have a path parameter named petId.)
421res := new(Result)
422for _, l := range fromPath {
423var matched bool
424for _, r := range fromOperation {
425if l == "{"+r+"}" {
426matched = true
427break
428}
429}
430if !matched {
431res.AddErrors(noParameterInPathMsg(l))
432}
433}
434
435for _, p := range fromOperation {
436var matched bool
437for _, r := range fromPath {
438if "{"+p+"}" == r {
439matched = true
440break
441}
442}
443if !matched {
444res.AddErrors(pathParamNotInPathMsg(path, p))
445}
446}
447
448return res
449}
450
451func (s *SpecValidator) validateReferenced() *Result {
452var res Result
453res.MergeAsWarnings(s.validateReferencedParameters())
454res.MergeAsWarnings(s.validateReferencedResponses())
455res.MergeAsWarnings(s.validateReferencedDefinitions())
456return &res
457}
458
459// nolint: dupl
460func (s *SpecValidator) validateReferencedParameters() *Result {
461// Each referenceable definition should have references.
462params := s.spec.Spec().Parameters
463if len(params) == 0 {
464return nil
465}
466
467expected := make(map[string]struct{})
468for k := range params {
469expected["#/parameters/"+jsonpointer.Escape(k)] = struct{}{}
470}
471for _, k := range s.analyzer.AllParameterReferences() {
472delete(expected, k)
473}
474
475if len(expected) == 0 {
476return nil
477}
478result := new(Result)
479for k := range expected {
480result.AddWarnings(unusedParamMsg(k))
481}
482return result
483}
484
485// nolint: dupl
486func (s *SpecValidator) validateReferencedResponses() *Result {
487// Each referenceable definition should have references.
488responses := s.spec.Spec().Responses
489if len(responses) == 0 {
490return nil
491}
492
493expected := make(map[string]struct{})
494for k := range responses {
495expected["#/responses/"+jsonpointer.Escape(k)] = struct{}{}
496}
497for _, k := range s.analyzer.AllResponseReferences() {
498delete(expected, k)
499}
500
501if len(expected) == 0 {
502return nil
503}
504result := new(Result)
505for k := range expected {
506result.AddWarnings(unusedResponseMsg(k))
507}
508return result
509}
510
511// nolint: dupl
512func (s *SpecValidator) validateReferencedDefinitions() *Result {
513// Each referenceable definition must have references.
514defs := s.spec.Spec().Definitions
515if len(defs) == 0 {
516return nil
517}
518
519expected := make(map[string]struct{})
520for k := range defs {
521expected["#/definitions/"+jsonpointer.Escape(k)] = struct{}{}
522}
523for _, k := range s.analyzer.AllDefinitionReferences() {
524delete(expected, k)
525}
526
527if len(expected) == 0 {
528return nil
529}
530
531result := new(Result)
532for k := range expected {
533result.AddWarnings(unusedDefinitionMsg(k))
534}
535return result
536}
537
538func (s *SpecValidator) validateRequiredDefinitions() *Result {
539// Each property listed in the required array must be defined in the properties of the model
540res := new(Result)
541
542DEFINITIONS:
543for d, schema := range s.spec.Spec().Definitions {
544if schema.Required != nil { // Safeguard
545for _, pn := range schema.Required {
546red := s.validateRequiredProperties(pn, d, &schema) //#nosec
547res.Merge(red)
548if !red.IsValid() && !s.Options.ContinueOnErrors {
549break DEFINITIONS // there is an error, let's stop that bleeding
550}
551}
552}
553}
554return res
555}
556
// validateRequiredProperties checks that a required property name (path) is
// actually declared by schema v (definition in), matching against regular
// properties, patternProperties and additionalProperties (recursively).
// A property that is both required and readOnly yields a warning.
func (s *SpecValidator) validateRequiredProperties(path, in string, v *spec.Schema) *Result {
	// Takes care of recursive property definitions, which may be nested in additionalProperties schemas
	res := new(Result)
	propertyMatch := false
	patternMatch := false
	additionalPropertiesMatch := false
	isReadOnly := false

	// Regular properties
	if _, ok := v.Properties[path]; ok {
		propertyMatch = true
		isReadOnly = v.Properties[path].ReadOnly
	}

	// NOTE: patternProperties are not supported in swagger. Even though, we continue validation here
	// We check all defined patterns: if one regexp is invalid, croaks an error
	for pp, pv := range v.PatternProperties {
		re, err := compileRegexp(pp)
		if err != nil {
			res.AddErrors(invalidPatternMsg(pp, in))
		} else if re.MatchString(path) {
			patternMatch = true
			// the regular property match takes precedence for the readOnly flag
			if !propertyMatch {
				isReadOnly = pv.ReadOnly
			}
		}
	}

	if !(propertyMatch || patternMatch) {
		if v.AdditionalProperties != nil {
			if v.AdditionalProperties.Allows && v.AdditionalProperties.Schema == nil {
				// free-form additionalProperties: any name is accepted
				additionalPropertiesMatch = true
			} else if v.AdditionalProperties.Schema != nil {
				// additionalProperties as schema are supported in swagger
				// recursively validates additionalProperties schema
				// TODO : anyOf, allOf, oneOf like in schemaPropsValidator
				red := s.validateRequiredProperties(path, in, v.AdditionalProperties.Schema)
				if red.IsValid() {
					additionalPropertiesMatch = true
					if !propertyMatch && !patternMatch {
						isReadOnly = v.AdditionalProperties.Schema.ReadOnly
					}
				}
				res.Merge(red)
			}
		}
	}

	// no declaration found anywhere: required property is undefined
	if !(propertyMatch || patternMatch || additionalPropertiesMatch) {
		res.AddErrors(requiredButNotDefinedMsg(path, in))
	}

	// required + readOnly is contradictory: warn
	if isReadOnly {
		res.AddWarnings(readOnlyAndRequiredMsg(in, path))
	}
	return res
}
614
// validateParameters performs all parameter-related checks on operations:
//   - for each method, path is unique, regardless of path parameters
//     e.g. GET:/petstore/{id}, GET:/petstore/{pet}, GET:/petstore are
//     considered duplicate paths
//   - each parameter should have a unique `name` and `type` combination
//   - each operation should have only 1 parameter of type body
//   - there must be at most 1 parameter in body
//   - parameters with pattern property must specify valid patterns
//   - $ref in parameters must resolve
//   - path param must be required
func (s *SpecValidator) validateParameters() *Result {
	res := new(Result)
	// matches any path segment still containing braces or whitespace after stripping
	rexGarbledPathSegment := mustCompileRegexp(`.*[{}\s]+.*`)
	for method, pi := range s.expandedAnalyzer().Operations() {
		methodPaths := make(map[string]map[string]string)
		for path, op := range pi {
			pathToAdd := pathHelp.stripParametersInPath(path)

			// Warn on garbled path after param stripping
			if rexGarbledPathSegment.MatchString(pathToAdd) {
				res.AddWarnings(pathStrippedParamGarbledMsg(pathToAdd))
			}

			// Check uniqueness of stripped paths
			if _, found := methodPaths[method][pathToAdd]; found {

				// Sort names for stable, testable output
				if strings.Compare(path, methodPaths[method][pathToAdd]) < 0 {
					res.AddErrors(pathOverlapMsg(path, methodPaths[method][pathToAdd]))
				} else {
					res.AddErrors(pathOverlapMsg(methodPaths[method][pathToAdd], path))
				}
			} else {
				if _, found := methodPaths[method]; !found {
					methodPaths[method] = map[string]string{}
				}
				methodPaths[method][pathToAdd] = path // Original non stripped path

			}

			var bodyParams []string
			var paramNames []string
			var hasForm, hasBody bool

			// Check parameters names uniqueness for operation
			// TODO: should be done after param expansion
			res.Merge(s.checkUniqueParams(path, method, op))

			for _, pr := range paramHelp.safeExpandedParamsFor(path, method, op.ID, res, s) {
				// Validate pattern regexp for parameters with a Pattern property
				if _, err := compileRegexp(pr.Pattern); err != nil {
					res.AddErrors(invalidPatternInParamMsg(op.ID, pr.Name, pr.Pattern))
				}

				// There must be at most one parameter in body: list them all
				if pr.In == swaggerBody {
					bodyParams = append(bodyParams, fmt.Sprintf("%q", pr.Name))
					hasBody = true
				}

				if pr.In == "path" {
					paramNames = append(paramNames, pr.Name)
					// Path declared in path must have the required: true property
					if !pr.Required {
						res.AddErrors(pathParamRequiredMsg(op.ID, pr.Name))
					}
				}

				if pr.In == "formData" {
					hasForm = true
				}

				if !(pr.Type == numberType || pr.Type == integerType) &&
					(pr.Maximum != nil || pr.Minimum != nil || pr.MultipleOf != nil) {
					// A non-numeric parameter has validation keywords for numeric instances (number and integer)
					res.AddWarnings(parameterValidationTypeMismatchMsg(pr.Name, path, pr.Type))
				}

				if !(pr.Type == stringType) &&
					// A non-string parameter has validation keywords for strings
					(pr.MaxLength != nil || pr.MinLength != nil || pr.Pattern != "") {
					res.AddWarnings(parameterValidationTypeMismatchMsg(pr.Name, path, pr.Type))
				}

				if !(pr.Type == arrayType) &&
					// A non-array parameter has validation keywords for arrays
					(pr.MaxItems != nil || pr.MinItems != nil || pr.UniqueItems) {
					res.AddWarnings(parameterValidationTypeMismatchMsg(pr.Name, path, pr.Type))
				}
			}

			// In:formData and In:body are mutually exclusive
			if hasBody && hasForm {
				res.AddErrors(bothFormDataAndBodyMsg(op.ID))
			}
			// There must be at most one body param
			// Accurately report situations when more than 1 body param is declared (possibly unnamed)
			if len(bodyParams) > 1 {
				sort.Strings(bodyParams)
				res.AddErrors(multipleBodyParamMsg(op.ID, bodyParams))
			}

			// Check uniqueness of parameters in path
			paramsInPath := pathHelp.extractPathParams(path)
			for i, p := range paramsInPath {
				for j, q := range paramsInPath {
					// i > j ensures each duplicated pair is reported once
					if p == q && i > j {
						res.AddErrors(pathParamNotUniqueMsg(path, p, q))
						break
					}
				}
			}

			// Warns about possible malformed params in path
			rexGarbledParam := mustCompileRegexp(`{.*[{}\s]+.*}`)
			for _, p := range paramsInPath {
				if rexGarbledParam.MatchString(p) {
					res.AddWarnings(pathParamGarbledMsg(path, p))
				}
			}

			// Match params from path vs params from params section
			res.Merge(s.validatePathParamPresence(path, paramsInPath, paramNames))
		}
	}
	return res
}
741
742func (s *SpecValidator) validateReferencesValid() *Result {
743// each reference must point to a valid object
744res := new(Result)
745for _, r := range s.analyzer.AllRefs() {
746if !r.IsValidURI(s.spec.SpecFilePath()) { // Safeguard - spec should always yield a valid URI
747res.AddErrors(invalidRefMsg(r.String()))
748}
749}
750if !res.HasErrors() {
751// NOTE: with default settings, loads.Document.Expanded()
752// stops on first error. Anyhow, the expand option to continue
753// on errors fails to report errors at all.
754exp, err := s.spec.Expanded()
755if err != nil {
756res.AddErrors(unresolvedReferencesMsg(err))
757}
758s.expanded = exp
759}
760return res
761}
762
763func (s *SpecValidator) checkUniqueParams(path, method string, op *spec.Operation) *Result {
764// Check for duplicate parameters declaration in param section.
765// Each parameter should have a unique `name` and `type` combination
766// NOTE: this could be factorized in analysis (when constructing the params map)
767// However, there are some issues with such a factorization:
768// - analysis does not seem to fully expand params
769// - param keys may be altered by x-go-name
770res := new(Result)
771pnames := make(map[string]struct{})
772
773if op.Parameters != nil { // Safeguard
774for _, ppr := range op.Parameters {
775var ok bool
776pr, red := paramHelp.resolveParam(path, method, op.ID, &ppr, s) //#nosec
777res.Merge(red)
778
779if pr != nil && pr.Name != "" { // params with empty name does no participate the check
780key := fmt.Sprintf("%s#%s", pr.In, pr.Name)
781
782if _, ok = pnames[key]; ok {
783res.AddErrors(duplicateParamNameMsg(pr.In, pr.Name, op.ID))
784}
785pnames[key] = struct{}{}
786}
787}
788}
789return res
790}
791
792// SetContinueOnErrors sets the ContinueOnErrors option for this validator.
793func (s *SpecValidator) SetContinueOnErrors(c bool) {
794s.Options.ContinueOnErrors = c
795}
796
797// expandedAnalyzer returns expanded.Analyzer when it is available.
798// otherwise just analyzer.
799func (s *SpecValidator) expandedAnalyzer() *analysis.Spec {
800if s.expanded != nil && s.expanded.Analyzer != nil {
801return s.expanded.Analyzer
802}
803return s.analyzer
804}
805