diff --git a/LICENSE b/LICENSE index 8dada3edaf..cee7d3d815 100644 --- a/LICENSE +++ b/LICENSE @@ -199,3 +199,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + + +Licenses for 3rd party libraries included in code: + +1. library protoc-gen-jsonschema (https://github.com/chrusty/protoc-gen-jsonschema) - +License: Apache License Version 2.0, January 2004 (full license included in plugins/restapi/jsonschema/LICENSE) diff --git a/README.md b/README.md index 5fd3b489a5..9a4b252974 100644 --- a/README.md +++ b/README.md @@ -116,6 +116,10 @@ If you are interested in contributing, please see the [contribution guidelines][ [![GitHub license](https://img.shields.io/badge/license-Apache%20license%202.0-blue.svg)](https://github.com/ligato/vpp-agent/blob/master/LICENSE) +## Modified 3rd party tools included + + - [protoc-gen-jsonschema][tool-included-jsonchema] ([code location in this repository][local-place-for-jsonchema]) + [agentctl]: cmd/agentctl [cn-infra]: https://github.com/ligato/cn-infra [contiv-vpp]: https://github.com/contiv/vpp @@ -131,3 +135,5 @@ If you are interested in contributing, please see the [contribution guidelines][ [vpp]: https://fd.io/vppproject/vpptech/ [vpp-agent]: https://hub.docker.com/r/ligato/vpp-agent [vpp-agent-arm64]: https://hub.docker.com/r/ligato/vpp-agent-arm64 +[tool-included-jsonchema]: https://github.com/chrusty/protoc-gen-jsonschema/tree/de75f1b59c4e0f5d5edf7be2a18d1c8e4d81b17a +[local-place-for-jsonchema]: plugins/restapi/jsonschema \ No newline at end of file diff --git a/go.mod b/go.mod index d6edac6030..91a827d57a 100644 --- a/go.mod +++ b/go.mod @@ -5,6 +5,7 @@ go 1.13 require ( git.fd.io/govpp.git v0.3.6-0.20200907135408-e517439567ad github.com/Shopify/sarama v1.20.1 // indirect + github.com/alecthomas/jsonschema v0.0.0-20200217214135-7152f22193c9 github.com/alicebob/miniredis v2.5.0+incompatible // indirect github.com/common-nighthawk/go-figure v0.0.0-20200609044655-c4b36f998cf2 github.com/coreos/bbolt v1.3.3 // indirect @@ -26,6 +27,7 @@ require ( github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 github.com/grpc-ecosystem/grpc-gateway v1.11.3 // indirect github.com/hashicorp/go-uuid v1.0.1 // indirect + github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0 github.com/jhump/protoreflect v1.7.0 github.com/lunixbochs/struc v0.0.0-20200521075829-a4cb8d33dbbe github.com/mitchellh/go-ps v0.0.0-20170309133038-4fdf99ab2936 @@ -44,6 +46,9 @@ require ( github.com/unrolled/render v0.0.0-20180914162206-b9786414de4d github.com/vishvananda/netlink v0.0.0-20180910184128-56b1bd27a9a3 github.com/vishvananda/netns v0.0.0-20180720170159-13995c7128cc + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect + github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect + github.com/xeipuuv/gojsonschema v1.1.0 github.com/yuin/gopher-lua v0.0.0-20190514113301-1cd887cd7036 // indirect go.ligato.io/cn-infra/v2 v2.5.0-alpha.0.20200313154441-b0d4c1b11c73 go.uber.org/multierr v1.2.0 // indirect diff --git a/go.sum b/go.sum index 14edc7eed7..ac1975fd5a 100644 --- a/go.sum +++ b/go.sum @@ -22,6 +22,8 @@ github.com/Shopify/sarama v1.20.1/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWX github.com/Shopify/toxiproxy v2.1.4+incompatible h1:TKdv8HiTLgE5wdJuEML90aBgNWsokNbMijUGhmcoBJc= github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= 
github.com/Songmu/prompter v0.0.0-20150725163906-b5721e8d5566/go.mod h1:fNhSFBGC+sg+dZ7AqDHgq+xYiom23TeTESzUbO7PIrE= +github.com/alecthomas/jsonschema v0.0.0-20200217214135-7152f22193c9 h1:h+KAZEUnNceFhqyH46BgwH4lk8m6pdR/3x3h7IPn7VA= +github.com/alecthomas/jsonschema v0.0.0-20200217214135-7152f22193c9/go.mod h1:/n6+1/DWPltRLWL/VKyUxg6tzsl5kHUCcraimt4vr60= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf h1:qet1QNfXsQxTZqLG4oE62mJzwPIB8+Tee4RNCL9ulrY= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= @@ -51,6 +53,8 @@ github.com/bsm/sarama-cluster v2.1.15+incompatible/go.mod h1:r7ao+4tTNXvWm+VRpRJ github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/chrusty/protoc-gen-jsonschema v0.0.0-20201201182816-de75f1b59c4e h1:VEDA+FrTIUnlSpMlo2i1e0L1hP45vek2ED+blYdOrxg= +github.com/chrusty/protoc-gen-jsonschema v0.0.0-20201201182816-de75f1b59c4e/go.mod h1:qYuJI3Nz/kjHcigPikCSSeh+pRGcCiT1U6qzWGEmaJ4= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -266,6 +270,8 @@ github.com/howeyc/crc16 v0.0.0-20171223171357-2b2a61e366a6 h1:IIVxLyDUYErC950b8k github.com/howeyc/crc16 v0.0.0-20171223171357-2b2a61e366a6/go.mod h1:JslaLRrzGsOKJgFEPBP65Whn+rdwDQSk0I0MCRFe2Zw= github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0 h1:i462o439ZjprVSFSZLZxcsoAe592sZB1rci2Z8j4wdk= +github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA= github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= @@ -281,6 +287,7 @@ github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQL github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= @@ -447,6 +454,7 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/testify 
v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.3.1-0.20190311161405-34c6fa2dc709/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/tinylib/msgp v1.0.2 h1:DfdQrzQa7Yh2es9SuLkixqxuXS2SxsdYn0KbdrOGWD8= @@ -464,6 +472,14 @@ github.com/vishvananda/netlink v0.0.0-20180910184128-56b1bd27a9a3/go.mod h1:+SR5 github.com/vishvananda/netns v0.0.0-20180720170159-13995c7128cc h1:R83G5ikgLMxrBvLh22JhdfI8K6YXEPHx5P03Uu3DRs4= github.com/vishvananda/netns v0.0.0-20180720170159-13995c7128cc/go.mod h1:ZjcWmFBXmLKZu9Nxj3WKYEafiSqer2rnvPr0en9UNpI= github.com/willfaught/gockle v0.0.0-20160623235217-4f254e1e0f0a/go.mod h1:NLcF+3nDpXVIZatjn5Z97gKzFFVU7TzgbAcs8G7/Jrs= +github.com/xeipuuv/gojsonpointer v0.0.0-20190809123943-df4f5c81cb3b h1:6cLsL+2FW6dRAdl5iMtHgRogVCff0QpRi9653YmdcJA= +github.com/xeipuuv/gojsonpointer v0.0.0-20190809123943-df4f5c81cb3b/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.1.0 h1:ngVtJC9TY/lg0AA/1k48FYhBrhRoFlEmWzsehpNAaZg= +github.com/xeipuuv/gojsonschema v1.1.0/go.mod h1:5yf86TLmAcydyeJq5YvxkGPE2fm/u4myDekKRoLuqhs= github.com/xiang90/probing v0.0.0-20160813154853-07dd2e8dfe18/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2 h1:eY9dn8+vbi4tKz5Qo6v2eYzo7kUS51QINcR5jNpbZS8= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= @@ -560,6 +576,7 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190514135907-3a4b5fb9f71f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/plugins/restapi/handlers.go b/plugins/restapi/handlers.go index 531cac4581..c4073f1c88 100644 --- a/plugins/restapi/handlers.go +++ b/plugins/restapi/handlers.go @@ -17,19 +17,41 @@ package restapi import ( + "bytes" "context" "fmt" "net/http" "runtime" + "strings" "github.com/go-errors/errors" + "github.com/golang/protobuf/proto" + protoc_plugin 
"github.com/golang/protobuf/protoc-gen-go/plugin" "github.com/unrolled/render" + "go.ligato.io/cn-infra/v2/logging/logrus" + "go.ligato.io/vpp-agent/v3/client" "go.ligato.io/vpp-agent/v3/cmd/agentctl/api/types" "go.ligato.io/vpp-agent/v3/pkg/version" "go.ligato.io/vpp-agent/v3/plugins/configurator" + "go.ligato.io/vpp-agent/v3/plugins/restapi/jsonschema/converter" "go.ligato.io/vpp-agent/v3/plugins/restapi/resturl" interfaces "go.ligato.io/vpp-agent/v3/proto/ligato/vpp/interfaces" + "google.golang.org/protobuf/reflect/protodesc" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/types/descriptorpb" +) + +const ( + // URLFieldNamingParamName is URL parameter name for JSON schema http handler's setting + // to output field names using proto/json/both names for fields + URLFieldNamingParamName = "fieldnames" + // OnlyProtoFieldNames is URL parameter value for JSON schema http handler to use only proto names as field names + OnlyProtoFieldNames = "onlyproto" + // OnlyJSONFieldNames is URL parameter value for JSON schema http handler to use only JSON names as field names + OnlyJSONFieldNames = "onlyjson" + + internalErrorLogPrefix = "500 Internal server error: " ) var ( @@ -40,6 +62,7 @@ var ( func (p *Plugin) registerInfoHandlers() { p.HTTPHandlers.RegisterHTTPHandler(resturl.Version, p.versionHandler, GET) + p.HTTPHandlers.RegisterHTTPHandler(resturl.JSONSchema, p.jsonSchemaHandler, GET) } // Registers ABF REST handler @@ -315,6 +338,138 @@ func (p *Plugin) registerHTTPHandler(key, method string, f func() (interface{}, p.HTTPHandlers.RegisterHTTPHandler(key, handlerFunc, method) } +// jsonSchemaHandler returns JSON schema of VPP-Agent configuration. +// This handler also accepts URL query parameters changing the exported field names of proto messages. By default, +// proto message fields are exported twice in JSON scheme. Once with proto name and once with JSON name. This should +// allow to use any of the 2 forms in JSON/YAML configuration when used JSON schema for validation. However, +// this behaviour can be modified by URLFieldNamingParamName URL query parameter, that force to export only +// proto named fields (OnlyProtoFieldNames URL query parameter value) or JSON named fields (OnlyJSONFieldNames +// URL query parameter value). 
+func (p *Plugin) jsonSchemaHandler(formatter *render.Render) http.HandlerFunc { + return func(w http.ResponseWriter, req *http.Request) { + // create FileDescriptorProto for dynamic Config holding all VPP-Agent configuration + knownModels, err := client.LocalClient.KnownModels("config") // locally registered models + if err != nil { + errMsg := fmt.Sprintf("can't get registered models: %v\n", err) + p.Log.Error(internalErrorLogPrefix + errMsg) + p.logError(formatter.JSON(w, http.StatusInternalServerError, errMsg)) + return + } + config, err := client.NewDynamicConfig(knownModels) + if err != nil { + errMsg := fmt.Sprintf("can't create dynamic config: %v\n", err) + p.Log.Error(internalErrorLogPrefix + errMsg) + p.logError(formatter.JSON(w, http.StatusInternalServerError, errMsg)) + return + } + dynConfigFileDescProto := protodesc.ToFileDescriptorProto(config.ProtoReflect().Descriptor().ParentFile()) + + // create list of all FileDescriptorProtos (imports must come before the converted proto file -> dynConfig is last) + fileDescriptorProtos := allFileDescriptorProtos(knownModels) + fileDescriptorProtos = append(fileDescriptorProtos, dynConfigFileDescProto) + + // create input for the protoc plugin (code extracted into plugins/restapi/jsonschema) that converts + // FileDescriptorProtos to JSON schema + params := []string{ + "messages=[Dynamic_config]", // targeting only the main config message (the proto file also contains other messages) + "disallow_additional_properties", // additional unknown JSON fields make applying the configuration fail + } + fieldNamesConverterParam := "proto_and_json_fieldnames" // create proto and JSON named fields by default + if fieldNames, found := req.URL.Query()[URLFieldNamingParamName]; found && len(fieldNames) > 0 { + // converting REST API request params to 3rd party tool params + switch fieldNames[0] { + case OnlyProtoFieldNames: + fieldNamesConverterParam = "" + case OnlyJSONFieldNames: + fieldNamesConverterParam = "json_fieldnames" + } + } + if fieldNamesConverterParam != "" { + params = append(params, fieldNamesConverterParam) + } + paramsStr := strings.Join(params, ",") + cgReq := &protoc_plugin.CodeGeneratorRequest{ + ProtoFile: fileDescriptorProtos, + FileToGenerate: []string{dynConfigFileDescProto.GetName()}, + Parameter: &paramsStr, + CompilerVersion: nil, // the compiler version is not needed by this protoc plugin + } + cgReqMarshalled, err := proto.Marshal(cgReq) + if err != nil { + errMsg := fmt.Sprintf("can't proto marshal CodeGeneratorRequest: %v\n", err) + p.Log.Error(internalErrorLogPrefix + errMsg) + p.logError(formatter.JSON(w, http.StatusInternalServerError, errMsg)) + return + } + + // use the JSON schema converter and handle error cases + p.Log.Debug("Processing code generator request") + protoConverter := converter.New(logrus.DefaultLogger().StandardLogger()) + res, err := protoConverter.ConvertFrom(bytes.NewReader(cgReqMarshalled)) + if err != nil { + if res == nil { + errMsg := fmt.Sprintf("failed to read registered model configuration input: %v\n", err) + p.Log.Error(internalErrorLogPrefix + errMsg) + p.logError(formatter.JSON(w, http.StatusInternalServerError, errMsg)) + return + } + errMsg := fmt.Sprintf("failed to generate JSON schema: %v (%v)\n", res.Error, err) + p.Log.Error(internalErrorLogPrefix + errMsg) + p.logError(formatter.JSON(w, http.StatusInternalServerError, errMsg)) + return + } + + // extract the JSON schema + // (protoc_plugin.CodeGeneratorResponse may split the file content into multiple pieces + // for performance optimization (as noted in its
godoc), but since only one file was requested, all pieces belong to that single file -> join the content together) + var sb strings.Builder + for _, file := range res.File { + sb.WriteString(file.GetContent()) + } + + // write the response + // (the JSON schema is a raw string and none of the available format renderers supports raw data output + // with a customizable content-type header -> handle it manually) + w.Header().Set(render.ContentType, render.ContentJSON+"; charset=UTF-8") + w.Write([]byte(sb.String())) // will also call WriteHeader(http.StatusOK) automatically + } +} + +// allImports retrieves all imports of the given FileDescriptor, including transitive imports (imports +// may be duplicated) +func allImports(fileDesc protoreflect.FileDescriptor) []protoreflect.FileDescriptor { + result := make([]protoreflect.FileDescriptor, 0) + imports := fileDesc.Imports() + for i := 0; i < imports.Len(); i++ { + currentImport := imports.Get(i).FileDescriptor + result = append(result, currentImport) + result = append(result, allImports(currentImport)...) + } + return result +} + +// allFileDescriptorProtos retrieves all FileDescriptorProtos related to the given models (including +// all imported proto files) +func allFileDescriptorProtos(knownModels []*client.ModelInfo) []*descriptorpb.FileDescriptorProto { + // extract all FileDescriptors for the given known models (including direct and transitive file imports) + fileDescriptors := make(map[string]protoreflect.FileDescriptor) // using a map for deduplication + for _, knownModel := range knownModels { + protoFile := knownModel.MessageDescriptor.ParentFile() + fileDescriptors[protoFile.Path()] = protoFile + for _, importProtoFile := range allImports(protoFile) { + fileDescriptors[importProtoFile.Path()] = importProtoFile + } + } + + // convert the retrieved FileDescriptors to FileDescriptorProtos + fileDescriptorProtos := make([]*descriptorpb.FileDescriptorProto, 0, len(knownModels)) + for _, fileDescriptor := range fileDescriptors { + fileDescriptorProtos = append(fileDescriptorProtos, protodesc.ToFileDescriptorProto(fileDescriptor)) + } + return fileDescriptorProtos +} + // versionHandler returns version of Agent. func (p *Plugin) versionHandler(formatter *render.Render) http.HandlerFunc { return func(w http.ResponseWriter, req *http.Request) { diff --git a/plugins/restapi/jsonschema/LICENSE b/plugins/restapi/jsonschema/LICENSE new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/plugins/restapi/jsonschema/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License.
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/restapi/jsonschema/README.md b/plugins/restapi/jsonschema/README.md new file mode 100644 index 0000000000..e264665af9 --- /dev/null +++ b/plugins/restapi/jsonschema/README.md @@ -0,0 +1,20 @@ +# Protoc-gen-jsonschema +Content of this package and subpackages is modified code of 3rd party library [protoc-gen-jsonschema](https://github.com/chrusty/protoc-gen-jsonschema). 
+The purpose of the tool is to provide proto to JSON schema conversion in the form of a protoc plugin. +The customization for Ligato does not touch the conversion functionality; it only removes the protoc +dependency and enables the tool to be used as a library (these packages are internal in the original repository). + + +## Change tracking +The base code for the Ligato modifications is [here](https://github.com/chrusty/protoc-gen-jsonschema/tree/de75f1b59c4e0f5d5edf7be2a18d1c8e4d81b17a). +Changes made in the initial commit: + +- removed all unnecessary parts + - project root files except the License + - the CMD part for the protoc connection + - converter tests (they depend on protoc at test runtime) +- extracted the converter out of the internal package so that it can be imported +- relaxed some info-level logging to debug-level logging (proto_package.go, lines 78 and 121) +- removed the "oneof" type from enums to provide compatibility with an external JSON example generator (types.go line 137 and converter.go line 116) + +Other changes can be tracked through the git history of this package and its subpackages \ No newline at end of file diff --git a/plugins/restapi/jsonschema/converter/converter.go b/plugins/restapi/jsonschema/converter/converter.go new file mode 100644 index 0000000000..6c0dc6866a --- /dev/null +++ b/plugins/restapi/jsonschema/converter/converter.go @@ -0,0 +1,271 @@ +package converter + +import ( + "encoding/json" + "fmt" + "io" + "io/ioutil" + "path" + "regexp" + "strings" + + "github.com/alecthomas/jsonschema" + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/protoc-gen-go/descriptor" + plugin "github.com/golang/protobuf/protoc-gen-go/plugin" + "github.com/sirupsen/logrus" +) + +const ( + messageDelimiter = "+" +) + +// Converter is everything you need to convert protos to JSONSchemas: +type Converter struct { + AllFieldsRequired bool + AllowNullValues bool + DisallowAdditionalProperties bool + DisallowBigIntsAsStrings bool + PrefixSchemaFilesWithPackage bool + UseJSONFieldnamesOnly bool + UseProtoAndJSONFieldnames bool + logger *logrus.Logger + sourceInfo *sourceCodeInfo + messageTargets []string +} + +// New returns a configured *Converter: +func New(logger *logrus.Logger) *Converter { + return &Converter{ + logger: logger, + } +} + +// ConvertFrom tells the converter to work on the given input: +func (c *Converter) ConvertFrom(rd io.Reader) (*plugin.CodeGeneratorResponse, error) { + c.logger.Debug("Reading code generation request") + input, err := ioutil.ReadAll(rd) + if err != nil { + c.logger.WithError(err).Error("Failed to read request") + return nil, err + } + + req := &plugin.CodeGeneratorRequest{} + err = proto.Unmarshal(input, req) + if err != nil { + c.logger.WithError(err).Error("Can't unmarshal input") + return nil, err + } + + c.logger.Debug("Converting input") + return c.convert(req) +} + +func (c *Converter) parseGeneratorParameters(parameters string) { + for _, parameter := range strings.Split(parameters, ",") { + switch parameter { + case "all_fields_required": + c.AllFieldsRequired = true + case "allow_null_values": + c.AllowNullValues = true + case "debug": + c.logger.SetLevel(logrus.DebugLevel) + case "disallow_additional_properties": + c.DisallowAdditionalProperties = true + case "disallow_bigints_as_strings": + c.DisallowBigIntsAsStrings = true + case "json_fieldnames": + c.UseJSONFieldnamesOnly = true + case "prefix_schema_files_with_package": + c.PrefixSchemaFilesWithPackage = true + case "proto_and_json_fieldnames": + c.UseProtoAndJSONFieldnames = true + } + + // look for specific message
targets + // message types are separated by messageDelimiter "+" + // examples: + // messages=[foo+bar] + // messages=[foo] + rx := regexp.MustCompile(`messages=\[([^\]]+)\]`) + if matches := rx.FindStringSubmatch(parameter); len(matches) == 2 { + c.messageTargets = strings.Split(matches[1], messageDelimiter) + } + } +} + +// Converts a proto "ENUM" into a JSON-Schema: +func (c *Converter) convertEnumType(enum *descriptor.EnumDescriptorProto) (jsonschema.Type, error) { + + // Prepare a new jsonschema.Type for our eventual return value: + jsonSchemaType := jsonschema.Type{ + Version: jsonschema.Version, + } + + // Generate a description from src comments (if available) + if src := c.sourceInfo.GetEnum(enum); src != nil { + jsonSchemaType.Description = formatDescription(src) + } + + // Note: we don't set a type specification (oneof string and integer) here, because explicitly listing + // the valid values (anything else is invalid) is a sufficient specification of what can be used + // (this also works around a bug in the example creator https://json-schema-faker.js.org/ that doesn't select + // the correct type for an enum value but rather chooses a random type from the oneof and casts the value to that type) + // + // Allow both strings and integers: + //jsonSchemaType.OneOf = append(jsonSchemaType.OneOf, &jsonschema.Type{Type: "string"}) + //jsonSchemaType.OneOf = append(jsonSchemaType.OneOf, &jsonschema.Type{Type: "integer"}) + + // Add the allowed values: + for _, enumValue := range enum.Value { + jsonSchemaType.Enum = append(jsonSchemaType.Enum, enumValue.Name) + jsonSchemaType.Enum = append(jsonSchemaType.Enum, enumValue.Number) + } + + return jsonSchemaType, nil +} + +// Converts a proto file into a JSON-Schema: +func (c *Converter) convertFile(file *descriptor.FileDescriptorProto) ([]*plugin.CodeGeneratorResponse_File, error) { + // Input filename: + protoFileName := path.Base(file.GetName()) + + // Prepare a list of responses: + var response []*plugin.CodeGeneratorResponse_File + + // user wants specific messages + genSpecificMessages := len(c.messageTargets) > 0 + + // Warn about multiple messages / enums in files: + if !genSpecificMessages && len(file.GetMessageType()) > 1 { + c.logger.WithField("schemas", len(file.GetMessageType())).WithField("proto_filename", protoFileName).Warn("protoc-gen-jsonschema will create multiple MESSAGE schemas from one proto file") + } + + if len(file.GetEnumType()) > 1 { + c.logger.WithField("schemas", len(file.GetEnumType())).WithField("proto_filename", protoFileName).Warn("protoc-gen-jsonschema will create multiple ENUM schemas from one proto file") + } + + // Generate standalone ENUMs: + if len(file.GetMessageType()) == 0 { + for _, enum := range file.GetEnumType() { + jsonSchemaFileName := c.generateSchemaFilename(file, enum.GetName()) + c.logger.WithField("proto_filename", protoFileName).WithField("enum_name", enum.GetName()).WithField("jsonschema_filename", jsonSchemaFileName).Info("Generating JSON-schema for stand-alone ENUM") + + // Convert the ENUM: + enumJSONSchema, err := c.convertEnumType(enum) + if err != nil { + c.logger.WithError(err).WithField("proto_filename", protoFileName).Error("Failed to convert") + return nil, err + } + + // Marshal the JSON-Schema into JSON: + jsonSchemaJSON, err := json.MarshalIndent(enumJSONSchema, "", " ") + if err != nil { + c.logger.WithError(err).Error("Failed to encode jsonSchema") + return nil, err + } + + // Add a response: + resFile := &plugin.CodeGeneratorResponse_File{ + Name: proto.String(jsonSchemaFileName), + Content:
proto.String(string(jsonSchemaJSON)), + } + response = append(response, resFile) + } + } else { + // Otherwise process MESSAGES (packages): + pkg, ok := c.relativelyLookupPackage(globalPkg, file.GetPackage()) + if !ok { + return nil, fmt.Errorf("no such package found: %s", file.GetPackage()) + } + + for _, msg := range file.GetMessageType() { + // skip if we are only generating schema for specific messages + if genSpecificMessages && !contains(c.messageTargets, msg.GetName()) { + continue + } + + jsonSchemaFileName := c.generateSchemaFilename(file, msg.GetName()) + c.logger.WithField("proto_filename", protoFileName).WithField("msg_name", msg.GetName()).WithField("jsonschema_filename", jsonSchemaFileName).Info("Generating JSON-schema for MESSAGE") + + // Convert the message: + messageJSONSchema, err := c.convertMessageType(pkg, msg) + if err != nil { + c.logger.WithError(err).WithField("proto_filename", protoFileName).Error("Failed to convert") + return nil, err + } + + // Marshal the JSON-Schema into JSON: + jsonSchemaJSON, err := json.MarshalIndent(messageJSONSchema, "", " ") + if err != nil { + c.logger.WithError(err).Error("Failed to encode jsonSchema") + return nil, err + } + + // Add a response: + resFile := &plugin.CodeGeneratorResponse_File{ + Name: proto.String(jsonSchemaFileName), + Content: proto.String(string(jsonSchemaJSON)), + } + response = append(response, resFile) + } + } + + return response, nil +} + +func (c *Converter) convert(req *plugin.CodeGeneratorRequest) (*plugin.CodeGeneratorResponse, error) { + c.parseGeneratorParameters(req.GetParameter()) + + generateTargets := make(map[string]bool) + for _, file := range req.GetFileToGenerate() { + generateTargets[file] = true + } + + c.sourceInfo = newSourceCodeInfo(req.GetProtoFile()) + res := &plugin.CodeGeneratorResponse{} + for _, file := range req.GetProtoFile() { + if file.GetPackage() == "" { + c.logger.WithField("filename", file.GetName()).Warn("Proto file doesn't specify a package") + continue + } + + for _, msg := range file.GetMessageType() { + c.logger.WithField("msg_name", msg.GetName()).WithField("package_name", file.GetPackage()).Debug("Loading a message") + c.registerType(file.Package, msg) + } + + for _, en := range file.GetEnumType() { + c.logger.WithField("enum_name", en.GetName()).WithField("package_name", file.GetPackage()).Debug("Loading an enum") + c.registerEnum(file.Package, en) + } + + if _, ok := generateTargets[file.GetName()]; ok { + c.logger.WithField("filename", file.GetName()).Debug("Converting file") + converted, err := c.convertFile(file) + if err != nil { + res.Error = proto.String(fmt.Sprintf("Failed to convert %s: %v", file.GetName(), err)) + return res, err + } + res.File = append(res.File, converted...) 
+ } + } + return res, nil +} + +func (c *Converter) generateSchemaFilename(file *descriptor.FileDescriptorProto, protoName string) string { + if c.PrefixSchemaFilesWithPackage { + return fmt.Sprintf("%s/%s.jsonschema", file.GetPackage(), protoName) + } + return fmt.Sprintf("%s.jsonschema", protoName) +} + +func contains(haystack []string, needle string) bool { + for i := 0; i < len(haystack); i++ { + if haystack[i] == needle { + return true + } + } + + return false +} diff --git a/plugins/restapi/jsonschema/converter/proto_package.go b/plugins/restapi/jsonschema/converter/proto_package.go new file mode 100644 index 0000000000..21ccab955d --- /dev/null +++ b/plugins/restapi/jsonschema/converter/proto_package.go @@ -0,0 +1,167 @@ +package converter + +import ( + "strings" + + "github.com/golang/protobuf/protoc-gen-go/descriptor" +) + +// ProtoPackage describes a Protobuf package, which is a container of message types. +type ProtoPackage struct { + name string + parent *ProtoPackage + children map[string]*ProtoPackage + types map[string]*descriptor.DescriptorProto + enums map[string]*descriptor.EnumDescriptorProto +} + +func newProtoPackage(parent *ProtoPackage, name string) *ProtoPackage { + pkgName := name + if parent != nil { + pkgName = parent.name + "." + name + } + + return &ProtoPackage{ + name: pkgName, + parent: parent, + children: make(map[string]*ProtoPackage), + types: make(map[string]*descriptor.DescriptorProto), + enums: make(map[string]*descriptor.EnumDescriptorProto), + } +} + +func (c *Converter) lookupType(pkg *ProtoPackage, name string) (*descriptor.DescriptorProto, string, bool) { + if strings.HasPrefix(name, ".") { + return c.relativelyLookupType(globalPkg, name[1:]) + } + + for ; pkg != nil; pkg = pkg.parent { + if desc, pkgName, ok := c.relativelyLookupType(pkg, name); ok { + return desc, pkgName, ok + } + } + return nil, "", false +} + +func (c *Converter) lookupEnum(pkg *ProtoPackage, name string) (*descriptor.EnumDescriptorProto, string, bool) { + if strings.HasPrefix(name, ".") { + return c.relativelyLookupEnum(globalPkg, name[1:]) + } + + for ; pkg != nil; pkg = pkg.parent { + if desc, pkgName, ok := c.relativelyLookupEnum(pkg, name); ok { + return desc, pkgName, ok + } + } + return nil, "", false +} + +func (c *Converter) relativelyLookupType(pkg *ProtoPackage, name string) (*descriptor.DescriptorProto, string, bool) { + components := strings.SplitN(name, ".", 2) + switch len(components) { + case 0: + c.logger.Debug("empty message name") + return nil, "", false + case 1: + found, ok := pkg.types[components[0]] + return found, pkg.name, ok + case 2: + c.logger.Tracef("Looking for %s in %s at %s (%v)", components[1], components[0], pkg.name, pkg) + if child, ok := pkg.children[components[0]]; ok { + found, pkgName, ok := c.relativelyLookupType(child, components[1]) + return found, pkgName, ok + } + if msg, ok := pkg.types[components[0]]; ok { + found, ok := c.relativelyLookupNestedType(msg, components[1]) + return found, pkg.name, ok + } + c.logger.WithField("component", components[0]).WithField("package_name", pkg.name).Debug("No such package nor message in package") + return nil, "", false + default: + c.logger.Error("Failed to lookup type") + return nil, "", false + } +} + +func (c *Converter) relativelyLookupNestedType(desc *descriptor.DescriptorProto, name string) (*descriptor.DescriptorProto, bool) { + components := strings.Split(name, ".") +componentLoop: + for _, component := range components { + for _, nested := range desc.GetNestedType() { + if
nested.GetName() == component { + desc = nested + continue componentLoop + } + } + c.logger.WithField("component", component).WithField("description", desc.GetName()).Info("no such nested message") + return nil, false + } + return desc, true +} + +func (c *Converter) relativelyLookupEnum(pkg *ProtoPackage, name string) (*descriptor.EnumDescriptorProto, string, bool) { + components := strings.SplitN(name, ".", 2) + switch len(components) { + case 0: + c.logger.Debug("empty enum name") + return nil, "", false + case 1: + found, ok := pkg.enums[components[0]] + return found, pkg.name, ok + case 2: + c.logger.Tracef("Looking for %s in %s at %s (%v)", components[1], components[0], pkg.name, pkg) + if child, ok := pkg.children[components[0]]; ok { + found, pkgName, ok := c.relativelyLookupEnum(child, components[1]) + return found, pkgName, ok + } + if msg, ok := pkg.types[components[0]]; ok { + found, ok := c.relativelyLookupNestedEnum(msg, components[1]) + return found, pkg.name, ok + } + c.logger.WithField("component", components[0]).WithField("package_name", pkg.name).Debug("No such package nor message in package") + return nil, "", false + default: + c.logger.Error("Failed to lookup type") + return nil, "", false + } +} + +func (c *Converter) relativelyLookupNestedEnum(desc *descriptor.DescriptorProto, name string) (*descriptor.EnumDescriptorProto, bool) { + components := strings.Split(name, ".") + + parent := desc + + if len(components) > 1 { + // The enum is nested inside a potentially nested message definition. + msgComponents := strings.Join(components[0:len(components)-1], ".") + var found bool + parent, found = c.relativelyLookupNestedType(parent, msgComponents) + if !found { + return nil, false + } + } + + // The enum is nested inside of a nested message. We need to dive down the + // tree to find the message the enum is nested in. Then we need to obtain the + // enum. + enumName := components[len(components)-1] + for _, enum := range parent.GetEnumType() { + if enum.GetName() == enumName { + return enum, true + } + } + + return nil, false +} + +func (c *Converter) relativelyLookupPackage(pkg *ProtoPackage, name string) (*ProtoPackage, bool) { + components := strings.Split(name, ".") + for _, c := range components { + var ok bool + pkg, ok = pkg.children[c] + if !ok { + return nil, false + } + } + return pkg, true +} diff --git a/plugins/restapi/jsonschema/converter/sourcecodeinfo.go b/plugins/restapi/jsonschema/converter/sourcecodeinfo.go new file mode 100644 index 0000000000..566a25b164 --- /dev/null +++ b/plugins/restapi/jsonschema/converter/sourcecodeinfo.go @@ -0,0 +1,116 @@ +package converter + +import ( + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/protoc-gen-go/descriptor" +) + +// Protobuf tag values for relevant message fields. 
Full list here: +// https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/descriptor.proto +const ( + tag_FileDescriptor_messageType int32 = 4 + tag_FileDescriptor_enumType int32 = 5 + tag_Descriptor_field int32 = 2 + tag_Descriptor_nestedType int32 = 3 + tag_Descriptor_enumType int32 = 4 + tag_Descriptor_oneofDecl int32 = 8 + tag_EnumDescriptor_value int32 = 2 +) + +type sourceCodeInfo struct { + lookup map[proto.Message]*descriptor.SourceCodeInfo_Location +} + +func (s sourceCodeInfo) GetMessage(m *descriptor.DescriptorProto) *descriptor.SourceCodeInfo_Location { + return s.lookup[m] +} + +func (s sourceCodeInfo) GetField(f *descriptor.FieldDescriptorProto) *descriptor.SourceCodeInfo_Location { + return s.lookup[f] +} + +func (s sourceCodeInfo) GetEnum(e *descriptor.EnumDescriptorProto) *descriptor.SourceCodeInfo_Location { + return s.lookup[e] +} + +func (s sourceCodeInfo) GetEnumValue(e *descriptor.EnumValueDescriptorProto) *descriptor.SourceCodeInfo_Location { + return s.lookup[e] +} + +func newSourceCodeInfo(fs []*descriptor.FileDescriptorProto) *sourceCodeInfo { + // For each source location in the provided files + // - resolve the (annoyingly) encoded path to its message/field/service/enum/etc definition + // - store the source info by its resolved definition + lookup := map[proto.Message]*descriptor.SourceCodeInfo_Location{} + for _, f := range fs { + for _, loc := range f.GetSourceCodeInfo().GetLocation() { + declaration := getDefinitionAtPath(f, loc.Path) + if declaration != nil { + lookup[declaration] = loc + } + } + } + return &sourceCodeInfo{lookup} +} + +// Resolve a protobuf "file-source path" to its associated definition (eg message/field/enum/etc). +// Note that some paths don't point to definitions (some reference subcomponents like name, type, +// field #, etc) and will therefore return nil. 
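+// For example, under the tag constants above, the path [4, 0, 2, 1] resolves to the second field +// (tag_Descriptor_field = 2, index 1) of the first top-level message in the file +// (tag_FileDescriptor_messageType = 4, index 0); this worked example of the encoding is added here for illustration.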
+func getDefinitionAtPath(file *descriptor.FileDescriptorProto, path []int32) proto.Message { + // The way protobuf encodes "file-source path" is a little opaque/tricky; + // this doc describes how it works: + // https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/descriptor.proto#L730 + + // Starting at the root of the file descriptor, traverse its object graph by following the + // specified path (and updating our position/state at each step) until either: + // - we reach the definition referenced by the path (and return it) + // - we hit a dead end because the path references a grammar element more granular than a + // definition (so we return nil) + var pos proto.Message = file + for step := 0; step < len(path); step++ { + switch p := pos.(type) { + case *descriptor.FileDescriptorProto: + switch path[step] { + case tag_FileDescriptor_messageType: + step++ + pos = p.MessageType[path[step]] + case tag_FileDescriptor_enumType: + step++ + pos = p.EnumType[path[step]] + default: + return nil // ignore all other types + } + + case *descriptor.DescriptorProto: + switch path[step] { + case tag_Descriptor_field: + step++ + pos = p.Field[path[step]] + case tag_Descriptor_nestedType: + step++ + pos = p.NestedType[path[step]] + case tag_Descriptor_enumType: + step++ + pos = p.EnumType[path[step]] + case tag_Descriptor_oneofDecl: + step++ + pos = p.OneofDecl[path[step]] + default: + return nil // ignore all other types + } + + case *descriptor.EnumDescriptorProto: + switch path[step] { + case tag_EnumDescriptor_value: + step++ + pos = p.Value[path[step]] + default: + return nil // ignore all other types + } + + default: + return nil // ignore all other types + } + } + return pos +} diff --git a/plugins/restapi/jsonschema/converter/types.go b/plugins/restapi/jsonschema/converter/types.go new file mode 100644 index 0000000000..e99a2a47ad --- /dev/null +++ b/plugins/restapi/jsonschema/converter/types.go @@ -0,0 +1,536 @@ +package converter + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/alecthomas/jsonschema" + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/protoc-gen-go/descriptor" + "github.com/iancoleman/orderedmap" + "github.com/xeipuuv/gojsonschema" +) + +var ( + globalPkg = newProtoPackage(nil, "") + + wellKnownTypes = map[string]bool{ + "DoubleValue": true, + "FloatValue": true, + "Int64Value": true, + "UInt64Value": true, + "Int32Value": true, + "UInt32Value": true, + "BoolValue": true, + "StringValue": true, + "BytesValue": true, + "Value": true, + } +) + +func (c *Converter) registerEnum(pkgName *string, enum *descriptor.EnumDescriptorProto) { + pkg := globalPkg + if pkgName != nil { + for _, node := range strings.Split(*pkgName, ".") { + if pkg == globalPkg && node == "" { + // Skips leading "." + continue + } + child, ok := pkg.children[node] + if !ok { + child = newProtoPackage(pkg, node) + pkg.children[node] = child + } + pkg = child + } + } + pkg.enums[enum.GetName()] = enum +} + +func (c *Converter) registerType(pkgName *string, msg *descriptor.DescriptorProto) { + pkg := globalPkg + if pkgName != nil { + for _, node := range strings.Split(*pkgName, ".") { + if pkg == globalPkg && node == "" { + // Skips leading "." 
+ continue + } + child, ok := pkg.children[node] + if !ok { + child = newProtoPackage(pkg, node) + pkg.children[node] = child + } + pkg = child + } + } + pkg.types[msg.GetName()] = msg +} + +// Convert a proto "field" (essentially a type-switch with some recursion): +func (c *Converter) convertField(curPkg *ProtoPackage, desc *descriptor.FieldDescriptorProto, msg *descriptor.DescriptorProto, duplicatedMessages map[*descriptor.DescriptorProto]string) (*jsonschema.Type, error) { + // Prepare a new jsonschema.Type for our eventual return value: + jsonSchemaType := &jsonschema.Type{} + + // Generate a description from src comments (if available) + if src := c.sourceInfo.GetField(desc); src != nil { + jsonSchemaType.Description = formatDescription(src) + } + + // Switch the types, and pick a JSONSchema equivalent: + switch desc.GetType() { + case descriptor.FieldDescriptorProto_TYPE_DOUBLE, + descriptor.FieldDescriptorProto_TYPE_FLOAT: + if c.AllowNullValues { + jsonSchemaType.OneOf = []*jsonschema.Type{ + {Type: gojsonschema.TYPE_NULL}, + {Type: gojsonschema.TYPE_NUMBER}, + } + } else { + jsonSchemaType.Type = gojsonschema.TYPE_NUMBER + } + + case descriptor.FieldDescriptorProto_TYPE_INT32, + descriptor.FieldDescriptorProto_TYPE_UINT32, + descriptor.FieldDescriptorProto_TYPE_FIXED32, + descriptor.FieldDescriptorProto_TYPE_SFIXED32, + descriptor.FieldDescriptorProto_TYPE_SINT32: + if c.AllowNullValues { + jsonSchemaType.OneOf = []*jsonschema.Type{ + {Type: gojsonschema.TYPE_NULL}, + {Type: gojsonschema.TYPE_INTEGER}, + } + } else { + jsonSchemaType.Type = gojsonschema.TYPE_INTEGER + } + + case descriptor.FieldDescriptorProto_TYPE_INT64, + descriptor.FieldDescriptorProto_TYPE_UINT64, + descriptor.FieldDescriptorProto_TYPE_FIXED64, + descriptor.FieldDescriptorProto_TYPE_SFIXED64, + descriptor.FieldDescriptorProto_TYPE_SINT64: + if c.AllowNullValues { + jsonSchemaType.OneOf = []*jsonschema.Type{ + {Type: gojsonschema.TYPE_STRING}, + {Type: gojsonschema.TYPE_NULL}, + } + } else { + jsonSchemaType.Type = gojsonschema.TYPE_STRING + } + + if c.DisallowBigIntsAsStrings { + jsonSchemaType.Type = gojsonschema.TYPE_INTEGER + } + + case descriptor.FieldDescriptorProto_TYPE_STRING, + descriptor.FieldDescriptorProto_TYPE_BYTES: + if c.AllowNullValues { + jsonSchemaType.OneOf = []*jsonschema.Type{ + {Type: gojsonschema.TYPE_NULL}, + {Type: gojsonschema.TYPE_STRING}, + } + } else { + jsonSchemaType.Type = gojsonschema.TYPE_STRING + } + + case descriptor.FieldDescriptorProto_TYPE_ENUM: + // Note: we don't set a type specification (oneof string and integer) here, because explicitly listing + // the valid values (anything else is invalid) is a sufficient specification of what can be used + // (this also works around a bug in the example creator https://json-schema-faker.js.org/ that doesn't select + // the correct type for an enum value but rather chooses a random type from the oneof and casts the value to that type) + // + //jsonSchemaType.OneOf = append(jsonSchemaType.OneOf, &jsonschema.Type{Type: gojsonschema.TYPE_STRING}) + //jsonSchemaType.OneOf = append(jsonSchemaType.OneOf, &jsonschema.Type{Type: gojsonschema.TYPE_INTEGER}) + if c.AllowNullValues { + jsonSchemaType.OneOf = append(jsonSchemaType.OneOf, &jsonschema.Type{Type: gojsonschema.TYPE_NULL}) + } + + // Go through all the enums we have, see if we can match any to this field.
+ fullEnumIdentifier := strings.TrimPrefix(desc.GetTypeName(), ".") + matchedEnum, _, ok := c.lookupEnum(curPkg, fullEnumIdentifier) + if !ok { + return nil, fmt.Errorf("unable to resolve enum type: %s", desc.GetType().String()) + } + + // We have found an enum, append its values. + for _, value := range matchedEnum.Value { + jsonSchemaType.Enum = append(jsonSchemaType.Enum, value.Name) + jsonSchemaType.Enum = append(jsonSchemaType.Enum, value.Number) + } + + case descriptor.FieldDescriptorProto_TYPE_BOOL: + if c.AllowNullValues { + jsonSchemaType.OneOf = []*jsonschema.Type{ + {Type: gojsonschema.TYPE_NULL}, + {Type: gojsonschema.TYPE_BOOLEAN}, + } + } else { + jsonSchemaType.Type = gojsonschema.TYPE_BOOLEAN + } + + case descriptor.FieldDescriptorProto_TYPE_GROUP, descriptor.FieldDescriptorProto_TYPE_MESSAGE: + switch desc.GetTypeName() { + case ".google.protobuf.Timestamp": + jsonSchemaType.Type = gojsonschema.TYPE_STRING + jsonSchemaType.Format = "date-time" + default: + jsonSchemaType.Type = gojsonschema.TYPE_OBJECT + if desc.GetLabel() == descriptor.FieldDescriptorProto_LABEL_OPTIONAL { + jsonSchemaType.AdditionalProperties = []byte("true") + } + if desc.GetLabel() == descriptor.FieldDescriptorProto_LABEL_REQUIRED { + jsonSchemaType.AdditionalProperties = []byte("false") + } + } + + default: + return nil, fmt.Errorf("unrecognized field type: %s", desc.GetType().String()) + } + + // Recurse array of primitive types: + if desc.GetLabel() == descriptor.FieldDescriptorProto_LABEL_REPEATED && jsonSchemaType.Type != gojsonschema.TYPE_OBJECT { + jsonSchemaType.Items = &jsonschema.Type{} + + if len(jsonSchemaType.Enum) > 0 { + jsonSchemaType.Items.Enum = jsonSchemaType.Enum + jsonSchemaType.Enum = nil + jsonSchemaType.Items.OneOf = nil + } else { + jsonSchemaType.Items.Type = jsonSchemaType.Type + jsonSchemaType.Items.OneOf = jsonSchemaType.OneOf + } + + if c.AllowNullValues { + jsonSchemaType.OneOf = []*jsonschema.Type{ + {Type: gojsonschema.TYPE_NULL}, + {Type: gojsonschema.TYPE_ARRAY}, + } + } else { + jsonSchemaType.Type = gojsonschema.TYPE_ARRAY + jsonSchemaType.OneOf = []*jsonschema.Type{} + } + return jsonSchemaType, nil + } + + // Recurse nested objects / arrays of objects (if necessary): + if jsonSchemaType.Type == gojsonschema.TYPE_OBJECT { + + recordType, pkgName, ok := c.lookupType(curPkg, desc.GetTypeName()) + if !ok { + return nil, fmt.Errorf("no such message type named %s", desc.GetTypeName()) + } + + // Recurse the recordType: + recursedJSONSchemaType, err := c.recursiveConvertMessageType(curPkg, recordType, pkgName, duplicatedMessages, false) + if err != nil { + return nil, err + } + + // Maps, arrays, and objects are structured in different ways: + switch { + + // Maps: + case recordType.Options.GetMapEntry(): + c.logger. + WithField("field_name", recordType.GetName()). + WithField("msg_name", *msg.Name). 
+ Tracef("Is a map") + + // Make sure we have a "value": + value, valuePresent := recursedJSONSchemaType.Properties.Get("value") + if !valuePresent { + return nil, fmt.Errorf("Unable to find 'value' property of MAP type") + } + + // Marshal the "value" properties to JSON (because that's how we can pass on AdditionalProperties): + additionalPropertiesJSON, err := json.Marshal(value) + if err != nil { + return nil, err + } + jsonSchemaType.AdditionalProperties = additionalPropertiesJSON + + // Arrays: + case desc.GetLabel() == descriptor.FieldDescriptorProto_LABEL_REPEATED: + jsonSchemaType.Items = recursedJSONSchemaType + jsonSchemaType.Type = gojsonschema.TYPE_ARRAY + + // Build up the list of required fields: + if c.AllFieldsRequired && recursedJSONSchemaType.Properties != nil { + for _, property := range recursedJSONSchemaType.Properties.Keys() { + jsonSchemaType.Items.Required = append(jsonSchemaType.Items.Required, property) + } + } + + // Not maps, not arrays: + default: + + // If we've got optional types then just take those: + if recursedJSONSchemaType.OneOf != nil { + return recursedJSONSchemaType, nil + } + + // If we're not an object then set the type from whatever we recursed: + if recursedJSONSchemaType.Type != gojsonschema.TYPE_OBJECT { + jsonSchemaType.Type = recursedJSONSchemaType.Type + } + + // Assume the attrbutes of the recursed value: + jsonSchemaType.Properties = recursedJSONSchemaType.Properties + jsonSchemaType.Ref = recursedJSONSchemaType.Ref + jsonSchemaType.Required = recursedJSONSchemaType.Required + + // Build up the list of required fields: + if c.AllFieldsRequired && recursedJSONSchemaType.Properties != nil { + for _, property := range recursedJSONSchemaType.Properties.Keys() { + jsonSchemaType.Required = append(jsonSchemaType.Required, property) + } + } + } + + // Optionally allow NULL values: + if c.AllowNullValues { + jsonSchemaType.OneOf = []*jsonschema.Type{ + {Type: gojsonschema.TYPE_NULL}, + {Type: jsonSchemaType.Type}, + } + jsonSchemaType.Type = "" + } + } + + jsonSchemaType.Required = dedupe(jsonSchemaType.Required) + + return jsonSchemaType, nil +} + +// Converts a proto "MESSAGE" into a JSON-Schema: +func (c *Converter) convertMessageType(curPkg *ProtoPackage, msg *descriptor.DescriptorProto) (*jsonschema.Schema, error) { + + // first, recursively find messages that appear more than once - in particular, that will break cycles + duplicatedMessages, err := c.findDuplicatedNestedMessages(curPkg, msg) + if err != nil { + return nil, err + } + + // main schema for the message + rootType, err := c.recursiveConvertMessageType(curPkg, msg, "", duplicatedMessages, false) + if err != nil { + return nil, err + } + + // and then generate the sub-schema for each duplicated message + definitions := jsonschema.Definitions{} + for refMsg, name := range duplicatedMessages { + refType, err := c.recursiveConvertMessageType(curPkg, refMsg, "", duplicatedMessages, true) + if err != nil { + return nil, err + } + + // need to give that schema an ID + if refType.Extras == nil { + refType.Extras = make(map[string]interface{}) + } + refType.Extras["id"] = name + definitions[name] = refType + } + + newJSONSchema := &jsonschema.Schema{ + Type: rootType, + Definitions: definitions, + } + + // Look for required fields (either by proto2 required flag, or the AllFieldsRequired option): + for _, fieldDesc := range msg.GetField() { + if c.AllFieldsRequired || fieldDesc.GetLabel() == descriptor.FieldDescriptorProto_LABEL_REQUIRED { + newJSONSchema.Required = 
+
+// findDuplicatedNestedMessages takes a message, and returns a map mapping pointers to messages that appear more than once
+// (typically because they're part of a reference cycle) to the sub-schema name that we give them.
+func (c *Converter) findDuplicatedNestedMessages(curPkg *ProtoPackage, msg *descriptor.DescriptorProto) (map[*descriptor.DescriptorProto]string, error) {
+	all := make(map[*descriptor.DescriptorProto]*nameAndCounter)
+	if err := c.recursiveFindDuplicatedNestedMessages(curPkg, msg, msg.GetName(), all); err != nil {
+		return nil, err
+	}
+
+	result := make(map[*descriptor.DescriptorProto]string)
+	for m, nameAndCounter := range all {
+		if nameAndCounter.counter > 1 && !strings.HasPrefix(nameAndCounter.name, ".google.protobuf.") {
+			result[m] = strings.TrimLeft(nameAndCounter.name, ".")
+		}
+	}
+
+	return result, nil
+}
+
+type nameAndCounter struct {
+	name    string
+	counter int
+}
+
+func (c *Converter) recursiveFindDuplicatedNestedMessages(curPkg *ProtoPackage, msg *descriptor.DescriptorProto, typeName string, alreadySeen map[*descriptor.DescriptorProto]*nameAndCounter) error {
+	if nameAndCounter, present := alreadySeen[msg]; present {
+		nameAndCounter.counter++
+		return nil
+	}
+	alreadySeen[msg] = &nameAndCounter{
+		name:    typeName,
+		counter: 1,
+	}
+
+	for _, desc := range msg.GetField() {
+		descType := desc.GetType()
+		if descType != descriptor.FieldDescriptorProto_TYPE_MESSAGE && descType != descriptor.FieldDescriptorProto_TYPE_GROUP {
+			// no nested messages
+			continue
+		}
+
+		typeName := desc.GetTypeName()
+		recordType, _, ok := c.lookupType(curPkg, typeName)
+		if !ok {
+			return fmt.Errorf("no such message type named %s", typeName)
+		}
+		if err := c.recursiveFindDuplicatedNestedMessages(curPkg, recordType, typeName, alreadySeen); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (c *Converter) recursiveConvertMessageType(curPkg *ProtoPackage, msg *descriptor.DescriptorProto, pkgName string, duplicatedMessages map[*descriptor.DescriptorProto]string, ignoreDuplicatedMessages bool) (*jsonschema.Type, error) {
+
+	// Handle google's well-known types:
+	if msg.Name != nil && wellKnownTypes[*msg.Name] && pkgName == ".google.protobuf" {
+		var schemaType string
+		switch *msg.Name {
+		case "DoubleValue", "FloatValue":
+			schemaType = gojsonschema.TYPE_NUMBER
+		case "Int32Value", "UInt32Value", "Int64Value", "UInt64Value":
+			schemaType = gojsonschema.TYPE_INTEGER
+		case "BoolValue":
+			schemaType = gojsonschema.TYPE_BOOLEAN
+		case "BytesValue", "StringValue":
+			schemaType = gojsonschema.TYPE_STRING
+		case "Value":
+			schemaType = gojsonschema.TYPE_OBJECT
+		}
+
+		// If we're allowing nulls then prepare a OneOf:
+		if c.AllowNullValues {
+			return &jsonschema.Type{
+				OneOf: []*jsonschema.Type{
+					{Type: gojsonschema.TYPE_NULL},
+					{Type: schemaType},
+				},
+			}, nil
+		}
+
+		// Otherwise just return this simple type:
+		return &jsonschema.Type{
+			Type: schemaType,
+		}, nil
+	}
+
+	if refName, ok := duplicatedMessages[msg]; ok && !ignoreDuplicatedMessages {
+		return &jsonschema.Type{
+			Version: jsonschema.Version,
+			Ref:     refName,
+		}, nil
+	}
+
+	// Prepare a new jsonschema:
+	jsonSchemaType := &jsonschema.Type{
+		Properties: orderedmap.New(),
+		Version:    jsonschema.Version,
+	}
+
+	// Generate a description from src comments (if available)
+	if src := c.sourceInfo.GetMessage(msg); src != nil {
+		jsonSchemaType.Description = formatDescription(src)
+	}
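+
+	// (from here on the message itself is rendered inline as an object schema; each field is converted
+	// below via convertField, so any reference cycle not caught by findDuplicatedNestedMessages above
+	// would recurse without bound - hence the duplicate detection and $ref indirection run first)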
+
+	// Optionally allow NULL values:
+	if c.AllowNullValues {
+		jsonSchemaType.OneOf = []*jsonschema.Type{
+			{Type: gojsonschema.TYPE_NULL},
+			{Type: gojsonschema.TYPE_OBJECT},
+		}
+	} else {
+		jsonSchemaType.Type = gojsonschema.TYPE_OBJECT
+	}
+
+	// DisallowAdditionalProperties will prevent validation from passing when extra fields (outside of the schema) are found:
+	if c.DisallowAdditionalProperties {
+		jsonSchemaType.AdditionalProperties = []byte("false")
+	} else {
+		jsonSchemaType.AdditionalProperties = []byte("true")
+	}
+
+	c.logger.WithField("message_str", proto.MarshalTextString(msg)).Trace("Converting message")
+	for _, fieldDesc := range msg.GetField() {
+		recursedJSONSchemaType, err := c.convertField(curPkg, fieldDesc, msg, duplicatedMessages)
+		if err != nil {
+			c.logger.WithError(err).WithField("field_name", fieldDesc.GetName()).WithField("message_name", msg.GetName()).Error("Failed to convert field")
+			return nil, err
+		}
+		c.logger.WithField("field_name", fieldDesc.GetName()).WithField("type", recursedJSONSchemaType.Type).Trace("Converted field")
+
+		// Figure out which field names we want to use:
+		switch {
+		case c.UseJSONFieldnamesOnly:
+			jsonSchemaType.Properties.Set(fieldDesc.GetJsonName(), recursedJSONSchemaType)
+		case c.UseProtoAndJSONFieldnames:
+			jsonSchemaType.Properties.Set(fieldDesc.GetName(), recursedJSONSchemaType)
+			jsonSchemaType.Properties.Set(fieldDesc.GetJsonName(), recursedJSONSchemaType)
+		default:
+			jsonSchemaType.Properties.Set(fieldDesc.GetName(), recursedJSONSchemaType)
+		}
+
+		// Look for required fields (either by proto2 required flag, or the AllFieldsRequired option):
+		if fieldDesc.GetLabel() == descriptor.FieldDescriptorProto_LABEL_REQUIRED {
+			jsonSchemaType.Required = append(jsonSchemaType.Required, fieldDesc.GetName())
+		}
+	}
+
+	// Remove empty properties to keep the final output as clean as possible:
+	if len(jsonSchemaType.Properties.Keys()) == 0 {
+		jsonSchemaType.Properties = nil
+	}
+
+	return jsonSchemaType, nil
+}
+
+func formatDescription(sl *descriptor.SourceCodeInfo_Location) string {
+	var lines []string
+	for _, str := range sl.GetLeadingDetachedComments() {
+		if s := strings.TrimSpace(str); s != "" {
+			lines = append(lines, s)
+		}
+	}
+	if s := strings.TrimSpace(sl.GetLeadingComments()); s != "" {
+		lines = append(lines, s)
+	}
+	if s := strings.TrimSpace(sl.GetTrailingComments()); s != "" {
+		lines = append(lines, s)
+	}
+	return strings.Join(lines, "\n\n")
+}
+
+func dedupe(inputStrings []string) []string {
+	appended := make(map[string]bool)
+	outputStrings := []string{}
+
+	for _, inputString := range inputStrings {
+		if !appended[inputString] {
+			outputStrings = append(outputStrings, inputString)
+			appended[inputString] = true
+		}
+	}
+	return outputStrings
+}
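+
+// Usage sketch (an assumption for illustration, not part of the upstream tool): the schema generated
+// here is exposed by the REST plugin (see the plugins/restapi changes below) and can be fetched for
+// offline validation of configuration, e.g. (agent address and default cn-infra HTTP port assumed):
+//
+//	curl http://localhost:9191/info/configuration/jsonschema > vpp-agent-config.schema.json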
diff --git a/plugins/restapi/plugin_restapi.go b/plugins/restapi/plugin_restapi.go
index 452473f464..2d7da82733 100644
--- a/plugins/restapi/plugin_restapi.go
+++ b/plugins/restapi/plugin_restapi.go
@@ -68,14 +68,14 @@ type Plugin struct {
 	vpeHandler  vpevppcalls.VppCoreAPI
 	teleHandler telemetryvppcalls.TelemetryVppAPI
 	// VPP Handlers
-	abfHandler   abfvppcalls.ABFVppRead
-	aclHandler   aclvppcalls.ACLVppRead
-	ifHandler    ifvppcalls.InterfaceVppRead
-	natHandler   natvppcalls.NatVppRead
-	l2Handler    l2vppcalls.L2VppAPI
-	l3Handler    l3vppcalls.L3VppAPI
-	ipSecHandler ipsecvppcalls.IPSecVPPRead
-	puntHandler  puntvppcalls.PuntVPPRead
+	abfHandler       abfvppcalls.ABFVppRead
+	aclHandler       aclvppcalls.ACLVppRead
+	ifHandler        ifvppcalls.InterfaceVppRead
+	natHandler       natvppcalls.NatVppRead
+	l2Handler        l2vppcalls.L2VppAPI
+	l3Handler        l3vppcalls.L3VppAPI
+	ipSecHandler     ipsecvppcalls.IPSecVPPRead
+	puntHandler      puntvppcalls.PuntVPPRead
 	wireguardHandler wireguardvppcalls.WgVppRead
 	// Linux handlers
 	linuxIfHandler iflinuxcalls.NetlinkAPIRead
@@ -224,6 +224,7 @@ func getIndexPageItems() map[string][]indexItem {
 	idxMap := map[string][]indexItem{
 		"Info": {
 			{Name: "Version", Path: resturl.Version},
+			{Name: "JSONSchema", Path: resturl.JSONSchema},
 		},
 		"ACL plugin": {
 			{Name: "IP-type access lists", Path: resturl.ACLIP},
@@ -270,6 +271,7 @@ func getPermissionsGroups() []*access.PermissionGroup {
 		Permissions: []*access.PermissionGroup_Permissions{
 			newPermission("/", GET),
 			newPermission(resturl.Version, GET),
+			newPermission(resturl.JSONSchema, GET),
 		},
 	}
 	tracerPg := &access.PermissionGroup{
diff --git a/plugins/restapi/resturl/urls.go b/plugins/restapi/resturl/urls.go
index f6db401cb7..bc546e4549 100644
--- a/plugins/restapi/resturl/urls.go
+++ b/plugins/restapi/resturl/urls.go
@@ -18,6 +18,10 @@ package resturl
 const (
 	// Version is a path for retrieving information about version of Agent.
 	Version = "/info/version"
+
+	// JSONSchema is a path for retrieving the JSON schema for VPP-Agent configuration (a dynamically
+	// created container of all registered configuration models).
+	JSONSchema = "/info/configuration/jsonschema"
 )

// Linux Dumps