Committing vendored dependencies and generated protos
Change-Id: I349c149b513d9de7d9f60bde2c954a939da2fc54
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/BUILD.bazel
new file mode 100644
index 0000000..cb772ef
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/BUILD.bazel
@@ -0,0 +1,45 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library")
+load("@io_bazel_rules_go//proto:compiler.bzl", "go_proto_compiler")
+
+package(default_visibility = ["//visibility:private"])
+
+go_library(
+ name = "go_default_library",
+ srcs = ["main.go"],
+ importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway",
+ deps = [
+ "//codegenerator:go_default_library",
+ "//protoc-gen-grpc-gateway/descriptor:go_default_library",
+ "//protoc-gen-grpc-gateway/gengateway:go_default_library",
+ "@com_github_golang_glog//:go_default_library",
+ "@com_github_golang_protobuf//proto:go_default_library",
+ "@io_bazel_rules_go//proto/wkt:compiler_plugin_go_proto",
+ ],
+)
+
+go_binary(
+ name = "protoc-gen-grpc-gateway",
+ embed = [":go_default_library"],
+ visibility = ["//visibility:public"],
+)
+
+go_proto_compiler(
+ name = "go_gen_grpc_gateway",
+ options = [
+ "logtostderr=true",
+ "allow_repeated_fields_in_body=true",
+ ],
+ plugin = ":protoc-gen-grpc-gateway",
+ suffix = ".pb.gw.go",
+ visibility = ["//visibility:public"],
+ deps = [
+ "//runtime:go_default_library",
+ "//utilities:go_default_library",
+ "@com_github_golang_protobuf//proto:go_default_library",
+ "@org_golang_google_grpc//:go_default_library",
+ "@org_golang_google_grpc//codes:go_default_library",
+ "@org_golang_google_grpc//grpclog:go_default_library",
+ "@org_golang_google_grpc//status:go_default_library",
+ "@org_golang_x_net//context:go_default_library",
+ ],
+)
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/BUILD.bazel
new file mode 100644
index 0000000..cfbdc27
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/BUILD.bazel
@@ -0,0 +1,44 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
+
+package(default_visibility = ["//:generators"])
+
+go_library(
+ name = "go_default_library",
+ srcs = [
+ "grpc_api_configuration.go",
+ "grpc_api_service.go",
+ "registry.go",
+ "services.go",
+ "types.go",
+ ],
+ importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor",
+ deps = [
+ "//protoc-gen-grpc-gateway/httprule:go_default_library",
+ "@com_github_ghodss_yaml//:go_default_library",
+ "@com_github_golang_glog//:go_default_library",
+ "@com_github_golang_protobuf//jsonpb:go_default_library_gen",
+ "@com_github_golang_protobuf//proto:go_default_library",
+ "@com_github_golang_protobuf//protoc-gen-go/generator:go_default_library_gen",
+ "@go_googleapis//google/api:annotations_go_proto",
+ "@io_bazel_rules_go//proto/wkt:compiler_plugin_go_proto",
+ "@io_bazel_rules_go//proto/wkt:descriptor_go_proto",
+ ],
+)
+
+go_test(
+ name = "go_default_test",
+ size = "small",
+ srcs = [
+ "grpc_api_configuration_test.go",
+ "registry_test.go",
+ "services_test.go",
+ "types_test.go",
+ ],
+ embed = [":go_default_library"],
+ deps = [
+ "//protoc-gen-grpc-gateway/httprule:go_default_library",
+ "@com_github_golang_protobuf//proto:go_default_library",
+ "@io_bazel_rules_go//proto/wkt:compiler_plugin_go_proto",
+ "@io_bazel_rules_go//proto/wkt:descriptor_go_proto",
+ ],
+)
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/grpc_api_configuration.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/grpc_api_configuration.go
new file mode 100644
index 0000000..ca68ed7
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/grpc_api_configuration.go
@@ -0,0 +1,71 @@
+package descriptor
+
+import (
+ "bytes"
+ "fmt"
+ "io/ioutil"
+ "strings"
+
+ "github.com/ghodss/yaml"
+ "github.com/golang/protobuf/jsonpb"
+)
+
+func loadGrpcAPIServiceFromYAML(yamlFileContents []byte, yamlSourceLogName string) (*GrpcAPIService, error) {
+ jsonContents, err := yaml.YAMLToJSON(yamlFileContents)
+ if err != nil {
+ return nil, fmt.Errorf("Failed to convert gRPC API Configuration from YAML in '%v' to JSON: %v", yamlSourceLogName, err)
+ }
+
+ // As our GrpcAPIService is incomplete, accept unknown fields.
+ unmarshaler := jsonpb.Unmarshaler{
+ AllowUnknownFields: true,
+ }
+
+ serviceConfiguration := GrpcAPIService{}
+ if err := unmarshaler.Unmarshal(bytes.NewReader(jsonContents), &serviceConfiguration); err != nil {
+ return nil, fmt.Errorf("Failed to parse gRPC API Configuration from YAML in '%v': %v", yamlSourceLogName, err)
+ }
+
+ return &serviceConfiguration, nil
+}
+
+func registerHTTPRulesFromGrpcAPIService(registry *Registry, service *GrpcAPIService, sourceLogName string) error {
+ if service.HTTP == nil {
+ // Nothing to do
+ return nil
+ }
+
+ for _, rule := range service.HTTP.GetRules() {
+ selector := "." + strings.Trim(rule.GetSelector(), " ")
+ if strings.ContainsAny(selector, "*, ") {
+ return fmt.Errorf("Selector '%v' in %v must specify a single service method without wildcards", rule.GetSelector(), sourceLogName)
+ }
+
+ registry.AddExternalHTTPRule(selector, rule)
+ }
+
+ return nil
+}
+
+// LoadGrpcAPIServiceFromYAML loads a gRPC API Configuration from the given YAML file
+// and registers the HttpRule descriptions contained in it as externalHTTPRules in
+// the given registry. This must be done before loading the proto file.
+//
+// You can learn more about gRPC API Service descriptions from google's documentation
+// at https://cloud.google.com/endpoints/docs/grpc/grpc-service-config
+//
+// Note that for the purposes of the gateway generator we only consider a subset of all
+// available features google supports in their service descriptions.
+func (r *Registry) LoadGrpcAPIServiceFromYAML(yamlFile string) error {
+ yamlFileContents, err := ioutil.ReadFile(yamlFile)
+ if err != nil {
+ return fmt.Errorf("Failed to read gRPC API Configuration description from '%v': %v", yamlFile, err)
+ }
+
+ service, err := loadGrpcAPIServiceFromYAML(yamlFileContents, yamlFile)
+ if err != nil {
+ return err
+ }
+
+ return registerHTTPRulesFromGrpcAPIService(r, service, yamlFile)
+}
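
For reference (not part of the vendored sources), here is a minimal sketch of how this loader might be exercised end to end: it writes a hypothetical service configuration to a temporary file, loads it into a Registry, and looks the rule back up under the fully qualified method name (note the leading "." added during registration). The service name, selector, and paths are made-up examples.

    package main

    import (
        "fmt"
        "io/ioutil"
        "log"
        "os"

        "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
    )

    func main() {
        // Hypothetical gRPC API Configuration; only the http section is consumed here.
        config := []byte("http:\n" +
            "  rules:\n" +
            "  - selector: example.v1.EchoService.Echo\n" +
            "    post: /v1/echo\n" +
            "    body: \"*\"\n")

        f, err := ioutil.TempFile("", "grpc_api_config_*.yaml")
        if err != nil {
            log.Fatal(err)
        }
        defer os.Remove(f.Name())
        if _, err := f.Write(config); err != nil {
            log.Fatal(err)
        }
        f.Close()

        reg := descriptor.NewRegistry()
        if err := reg.LoadGrpcAPIServiceFromYAML(f.Name()); err != nil {
            log.Fatal(err)
        }
        // Rules are keyed by the fully qualified method name, prefixed with ".".
        rules := reg.LookupExternalHTTPRules(".example.v1.EchoService.Echo")
        fmt.Printf("registered %d external rule(s)\n", len(rules))
    }
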
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/grpc_api_service.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/grpc_api_service.go
new file mode 100644
index 0000000..75b8240
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/grpc_api_service.go
@@ -0,0 +1,31 @@
+package descriptor
+
+import (
+ "github.com/golang/protobuf/proto"
+ "google.golang.org/genproto/googleapis/api/annotations"
+)
+
+// GrpcAPIService represents a stripped-down version of google.api.Service.
+// Compare to https://github.com/googleapis/googleapis/blob/master/google/api/service.proto
+// The original imports 23 other protobuf files we are not interested in. If a significant
+// subset (>50%) of these start being reproduced in this file we should swap to using the
+// full generated version instead.
+//
+// For the purposes of the gateway generator we only consider a small subset of all
+// available features google supports in their service descriptions. Thanks to backwards
+// compatibility guarantees by protobuf it is safe for us to remove the other fields.
+// We also only implement the absolute minimum of protobuf generator boilerplate to use
+// our simplified version. These should be pretty stable too.
+type GrpcAPIService struct {
+ // Http Rule. Named Http in the actual proto. Changed to suppress linter warning.
+ HTTP *annotations.Http `protobuf:"bytes,9,opt,name=http" json:"http,omitempty"`
+}
+
+// ProtoMessage marks GrpcAPIService as a protobuf message.
+func (*GrpcAPIService) ProtoMessage() {}
+
+// Reset resets the GrpcAPIService
+func (m *GrpcAPIService) Reset() { *m = GrpcAPIService{} }
+
+// String returns the string representation of the GrpcAPIService
+func (m *GrpcAPIService) String() string { return proto.CompactTextString(m) }
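
As a quick illustration (not part of the vendored sources) that the three methods above are enough protobuf boilerplate, this sketch feeds a service-config JSON document straight into jsonpb with unknown fields allowed; the selector and path are hypothetical.

    package main

    import (
        "fmt"
        "log"
        "strings"

        "github.com/golang/protobuf/jsonpb"
        "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
    )

    func main() {
        // Hypothetical service config; unknown sections are tolerated.
        const cfg = `{
            "http": {"rules": [{"selector": "example.v1.EchoService.Echo", "get": "/v1/echo"}]},
            "unknown_section": {}
        }`

        u := jsonpb.Unmarshaler{AllowUnknownFields: true}
        var svc descriptor.GrpcAPIService
        if err := u.Unmarshal(strings.NewReader(cfg), &svc); err != nil {
            log.Fatal(err)
        }
        fmt.Println(svc.HTTP.GetRules()[0].GetGet()) // /v1/echo
    }
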
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry.go
new file mode 100644
index 0000000..2f05636
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry.go
@@ -0,0 +1,498 @@
+package descriptor
+
+import (
+ "fmt"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "github.com/golang/glog"
+ descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor"
+ plugin "github.com/golang/protobuf/protoc-gen-go/plugin"
+ "google.golang.org/genproto/googleapis/api/annotations"
+)
+
+// Registry is a registry of information extracted from plugin.CodeGeneratorRequest.
+type Registry struct {
+ // msgs is a mapping from fully-qualified message name to descriptor
+ msgs map[string]*Message
+
+ // enums is a mapping from fully-qualified enum name to descriptor
+ enums map[string]*Enum
+
+ // files is a mapping from file path to descriptor
+ files map[string]*File
+
+ // prefix is a prefix to be inserted into golang package paths generated from proto package names.
+ prefix string
+
+ // importPath is used as the package if no input files declare go_package. If it contains slashes, everything up to the rightmost slash is ignored.
+ importPath string
+
+ // pkgMap is a user-specified mapping from file path to proto package.
+ pkgMap map[string]string
+
+ // pkgAliases is a mapping from package aliases to package paths in go which are already taken.
+ pkgAliases map[string]string
+
+ // allowDeleteBody permits http delete methods to have a body
+ allowDeleteBody bool
+
+ // externalHTTPRules is a mapping from fully qualified service method names to additional HttpRules applicable besides the ones found in annotations.
+ externalHTTPRules map[string][]*annotations.HttpRule
+
+ // allowMerge, if true, generates one swagger file out of multiple protos
+ allowMerge bool
+
+ // mergeFileName is the target swagger file name after the merge
+ mergeFileName string
+
+ // allowRepeatedFieldsInBody permits repeated fields in the body field path of the `google.api.http` annotation option
+ allowRepeatedFieldsInBody bool
+
+ // includePackageInTags controls whether the package name defined in the `package` directive
+ // in the proto file can be prepended to the gRPC service name in the `Tags` field of every operation.
+ includePackageInTags bool
+
+ // repeatedPathParamSeparator specifies how path parameter repeated fields are separated
+ repeatedPathParamSeparator repeatedFieldSeparator
+
+ // useJSONNamesForFields, if true, uses the json tag name for generating fields in swagger definitions;
+ // otherwise the original proto name is used. It is helpful for keeping the swagger definition in sync
+ // with the grpc-gateway response when it uses json tags for marshaling.
+ useJSONNamesForFields bool
+
+ // useFQNForSwaggerName, if true, makes swagger names use the fully qualified name (FQN) from the proto
+ // definition, generating a dot-separated swagger name that concatenates all elements of the proto FQN.
+ // If false, the default behavior is to concatenate the last 2 elements of the FQN if they are unique,
+ // otherwise to concatenate all the elements of the FQN without any separator.
+ useFQNForSwaggerName bool
+
+ // allowColonFinalSegments determines whether colons are permitted
+ // in the final segment of a path.
+ allowColonFinalSegments bool
+}
+
+type repeatedFieldSeparator struct {
+ name string
+ sep rune
+}
+
+// NewRegistry returns a new Registry.
+func NewRegistry() *Registry {
+ return &Registry{
+ msgs: make(map[string]*Message),
+ enums: make(map[string]*Enum),
+ files: make(map[string]*File),
+ pkgMap: make(map[string]string),
+ pkgAliases: make(map[string]string),
+ externalHTTPRules: make(map[string][]*annotations.HttpRule),
+ repeatedPathParamSeparator: repeatedFieldSeparator{
+ name: "csv",
+ sep: ',',
+ },
+ }
+}
+
+// Load loads definitions of services, methods, messages, enumerations and fields from "req".
+func (r *Registry) Load(req *plugin.CodeGeneratorRequest) error {
+ for _, file := range req.GetProtoFile() {
+ r.loadFile(file)
+ }
+
+ var targetPkg string
+ for _, name := range req.FileToGenerate {
+ target := r.files[name]
+ if target == nil {
+ return fmt.Errorf("no such file: %s", name)
+ }
+ name := r.packageIdentityName(target.FileDescriptorProto)
+ if targetPkg == "" {
+ targetPkg = name
+ } else {
+ if targetPkg != name {
+ return fmt.Errorf("inconsistent package names: %s %s", targetPkg, name)
+ }
+ }
+
+ if err := r.loadServices(target); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// loadFile loads messages, enumerations and fields from "file".
+// It does not load services and methods in "file". You need to call
+// loadServices after loadFile has been called for all files to load services and methods.
+func (r *Registry) loadFile(file *descriptor.FileDescriptorProto) {
+ pkg := GoPackage{
+ Path: r.goPackagePath(file),
+ Name: r.defaultGoPackageName(file),
+ }
+ if err := r.ReserveGoPackageAlias(pkg.Name, pkg.Path); err != nil {
+ for i := 0; ; i++ {
+ alias := fmt.Sprintf("%s_%d", pkg.Name, i)
+ if err := r.ReserveGoPackageAlias(alias, pkg.Path); err == nil {
+ pkg.Alias = alias
+ break
+ }
+ }
+ }
+ f := &File{
+ FileDescriptorProto: file,
+ GoPkg: pkg,
+ }
+
+ r.files[file.GetName()] = f
+ r.registerMsg(f, nil, file.GetMessageType())
+ r.registerEnum(f, nil, file.GetEnumType())
+}
+
+func (r *Registry) registerMsg(file *File, outerPath []string, msgs []*descriptor.DescriptorProto) {
+ for i, md := range msgs {
+ m := &Message{
+ File: file,
+ Outers: outerPath,
+ DescriptorProto: md,
+ Index: i,
+ }
+ for _, fd := range md.GetField() {
+ m.Fields = append(m.Fields, &Field{
+ Message: m,
+ FieldDescriptorProto: fd,
+ })
+ }
+ file.Messages = append(file.Messages, m)
+ r.msgs[m.FQMN()] = m
+ glog.V(1).Infof("register name: %s", m.FQMN())
+
+ var outers []string
+ outers = append(outers, outerPath...)
+ outers = append(outers, m.GetName())
+ r.registerMsg(file, outers, m.GetNestedType())
+ r.registerEnum(file, outers, m.GetEnumType())
+ }
+}
+
+func (r *Registry) registerEnum(file *File, outerPath []string, enums []*descriptor.EnumDescriptorProto) {
+ for i, ed := range enums {
+ e := &Enum{
+ File: file,
+ Outers: outerPath,
+ EnumDescriptorProto: ed,
+ Index: i,
+ }
+ file.Enums = append(file.Enums, e)
+ r.enums[e.FQEN()] = e
+ glog.V(1).Infof("register enum name: %s", e.FQEN())
+ }
+}
+
+// LookupMsg looks up a message type by "name".
+// It tries to resolve "name" from "location" if "name" is a relative message name.
+func (r *Registry) LookupMsg(location, name string) (*Message, error) {
+ glog.V(1).Infof("lookup %s from %s", name, location)
+ if strings.HasPrefix(name, ".") {
+ m, ok := r.msgs[name]
+ if !ok {
+ return nil, fmt.Errorf("no message found: %s", name)
+ }
+ return m, nil
+ }
+
+ if !strings.HasPrefix(location, ".") {
+ location = fmt.Sprintf(".%s", location)
+ }
+ components := strings.Split(location, ".")
+ for len(components) > 0 {
+ fqmn := strings.Join(append(components, name), ".")
+ if m, ok := r.msgs[fqmn]; ok {
+ return m, nil
+ }
+ components = components[:len(components)-1]
+ }
+ return nil, fmt.Errorf("no message found: %s", name)
+}
+
+// LookupEnum looks up an enum type by "name".
+// It tries to resolve "name" from "location" if "name" is a relative enum name.
+func (r *Registry) LookupEnum(location, name string) (*Enum, error) {
+ glog.V(1).Infof("lookup enum %s from %s", name, location)
+ if strings.HasPrefix(name, ".") {
+ e, ok := r.enums[name]
+ if !ok {
+ return nil, fmt.Errorf("no enum found: %s", name)
+ }
+ return e, nil
+ }
+
+ if !strings.HasPrefix(location, ".") {
+ location = fmt.Sprintf(".%s", location)
+ }
+ components := strings.Split(location, ".")
+ for len(components) > 0 {
+ fqen := strings.Join(append(components, name), ".")
+ if e, ok := r.enums[fqen]; ok {
+ return e, nil
+ }
+ components = components[:len(components)-1]
+ }
+ return nil, fmt.Errorf("no enum found: %s", name)
+}
+
+// LookupFile looks up a file by name.
+func (r *Registry) LookupFile(name string) (*File, error) {
+ f, ok := r.files[name]
+ if !ok {
+ return nil, fmt.Errorf("no such file given: %s", name)
+ }
+ return f, nil
+}
+
+// LookupExternalHTTPRules looks up external http rules by fully qualified service method name
+func (r *Registry) LookupExternalHTTPRules(qualifiedMethodName string) []*annotations.HttpRule {
+ return r.externalHTTPRules[qualifiedMethodName]
+}
+
+// AddExternalHTTPRule adds an external http rule for the given fully qualified service method name
+func (r *Registry) AddExternalHTTPRule(qualifiedMethodName string, rule *annotations.HttpRule) {
+ r.externalHTTPRules[qualifiedMethodName] = append(r.externalHTTPRules[qualifiedMethodName], rule)
+}
+
+// AddPkgMap adds a mapping from a .proto file to proto package name.
+func (r *Registry) AddPkgMap(file, protoPkg string) {
+ r.pkgMap[file] = protoPkg
+}
+
+// SetPrefix registers the prefix to be added to go package paths generated from proto package names.
+func (r *Registry) SetPrefix(prefix string) {
+ r.prefix = prefix
+}
+
+// SetImportPath registers the importPath which is used as the package if no
+// input files declare go_package. If it contains slashes, everything up to the
+// rightmost slash is ignored.
+func (r *Registry) SetImportPath(importPath string) {
+ r.importPath = importPath
+}
+
+// ReserveGoPackageAlias reserves a unique alias for a go package.
+// On success, the alias will never be used for other packages in generated go files.
+// On failure, the alias is already taken by another package, so you need to use another
+// alias for the package in your go files.
+func (r *Registry) ReserveGoPackageAlias(alias, pkgpath string) error {
+ if taken, ok := r.pkgAliases[alias]; ok {
+ if taken == pkgpath {
+ return nil
+ }
+ return fmt.Errorf("package name %s is already taken. Use another alias", alias)
+ }
+ r.pkgAliases[alias] = pkgpath
+ return nil
+}
+
+// goPackagePath returns the go package path which go files generated from "f" should have.
+// It respects the mapping registered by AddPkgMap if it exists, or uses go_package as the import
+// path if it includes a slash. Otherwise, it generates a path from the file name of "f".
+func (r *Registry) goPackagePath(f *descriptor.FileDescriptorProto) string {
+ name := f.GetName()
+ if pkg, ok := r.pkgMap[name]; ok {
+ return path.Join(r.prefix, pkg)
+ }
+
+ gopkg := f.Options.GetGoPackage()
+ idx := strings.LastIndex(gopkg, "/")
+ if idx >= 0 {
+ if sc := strings.LastIndex(gopkg, ";"); sc > 0 {
+ gopkg = gopkg[:sc]
+ }
+ return gopkg
+ }
+
+ return path.Join(r.prefix, path.Dir(name))
+}
+
+// GetAllFQMNs returns a list of all FQMNs
+func (r *Registry) GetAllFQMNs() []string {
+ var keys []string
+ for k := range r.msgs {
+ keys = append(keys, k)
+ }
+ return keys
+}
+
+// GetAllFQENs returns a list of all FQENs
+func (r *Registry) GetAllFQENs() []string {
+ var keys []string
+ for k := range r.enums {
+ keys = append(keys, k)
+ }
+ return keys
+}
+
+// SetAllowDeleteBody controls whether http delete methods may have a body;
+// if not allowed, loading fails when a delete method declares one.
+func (r *Registry) SetAllowDeleteBody(allow bool) {
+ r.allowDeleteBody = allow
+}
+
+// SetAllowMerge controls whether one swagger file is generated out of multiple protos
+func (r *Registry) SetAllowMerge(allow bool) {
+ r.allowMerge = allow
+}
+
+// IsAllowMerge reports whether one swagger file is generated out of multiple protos
+func (r *Registry) IsAllowMerge() bool {
+ return r.allowMerge
+}
+
+// SetMergeFileName sets the target swagger file name for the merge of multiple protos
+func (r *Registry) SetMergeFileName(mergeFileName string) {
+ r.mergeFileName = mergeFileName
+}
+
+// SetAllowRepeatedFieldsInBody controls whether repeated fields can be used
+// in the `body` and `response_body` field paths of the `google.api.http` annotation option
+func (r *Registry) SetAllowRepeatedFieldsInBody(allow bool) {
+ r.allowRepeatedFieldsInBody = allow
+}
+
+// IsAllowRepeatedFieldsInBody reports whether repeated fields can be used
+// in the `body` and `response_body` field paths of the `google.api.http` annotation option
+func (r *Registry) IsAllowRepeatedFieldsInBody() bool {
+ return r.allowRepeatedFieldsInBody
+}
+
+// SetIncludePackageInTags controls whether the package name defined in the `package` directive
+// in the proto file can be prepended to the gRPC service name in the `Tags` field of every operation.
+func (r *Registry) SetIncludePackageInTags(allow bool) {
+ r.includePackageInTags = allow
+}
+
+// IsIncludePackageInTags checks whether the package name defined in the `package` directive
+// in the proto file can be prepended to the gRPC service name in the `Tags` field of every operation.
+func (r *Registry) IsIncludePackageInTags() bool {
+ return r.includePackageInTags
+}
+
+// GetRepeatedPathParamSeparator returns a rune specifying how
+// path parameter repeated fields are separated.
+func (r *Registry) GetRepeatedPathParamSeparator() rune {
+ return r.repeatedPathParamSeparator.sep
+}
+
+// GetRepeatedPathParamSeparatorName returns the name of the separator used for repeated
+// path parameter fields, i.e. 'csv', 'pipes', 'ssv' or 'tsv'.
+func (r *Registry) GetRepeatedPathParamSeparatorName() string {
+ return r.repeatedPathParamSeparator.name
+}
+
+// SetRepeatedPathParamSeparator sets how path parameter repeated fields are
+// separated. Allowed names are 'csv', 'pipes', 'ssv' and 'tsv'.
+func (r *Registry) SetRepeatedPathParamSeparator(name string) error {
+ var sep rune
+ switch name {
+ case "csv":
+ sep = ','
+ case "pipes":
+ sep = '|'
+ case "ssv":
+ sep = ' '
+ case "tsv":
+ sep = '\t'
+ default:
+ return fmt.Errorf("unknown repeated path parameter separator: %s", name)
+ }
+ r.repeatedPathParamSeparator = repeatedFieldSeparator{
+ name: name,
+ sep: sep,
+ }
+ return nil
+}
+
+// SetUseJSONNamesForFields sets useJSONNamesForFields
+func (r *Registry) SetUseJSONNamesForFields(use bool) {
+ r.useJSONNamesForFields = use
+}
+
+// GetUseJSONNamesForFields returns useJSONNamesForFields
+func (r *Registry) GetUseJSONNamesForFields() bool {
+ return r.useJSONNamesForFields
+}
+
+// SetUseFQNForSwaggerName sets useFQNForSwaggerName
+func (r *Registry) SetUseFQNForSwaggerName(use bool) {
+ r.useFQNForSwaggerName = use
+}
+
+// GetAllowColonFinalSegments returns allowColonFinalSegments
+func (r *Registry) GetAllowColonFinalSegments() bool {
+ return r.allowColonFinalSegments
+}
+
+// SetAllowColonFinalSegments sets allowColonFinalSegments
+func (r *Registry) SetAllowColonFinalSegments(use bool) {
+ r.allowColonFinalSegments = use
+}
+
+// GetUseFQNForSwaggerName returns useFQNForSwaggerName
+func (r *Registry) GetUseFQNForSwaggerName() bool {
+ return r.useFQNForSwaggerName
+}
+
+// GetMergeFileName returns the target merged swagger file name
+func (r *Registry) GetMergeFileName() string {
+ return r.mergeFileName
+}
+
+// sanitizePackageName replaces disallowed characters in the package name
+// with allowed ones.
+func sanitizePackageName(pkgName string) string {
+ pkgName = strings.Replace(pkgName, ".", "_", -1)
+ pkgName = strings.Replace(pkgName, "-", "_", -1)
+ return pkgName
+}
+
+// defaultGoPackageName returns the default go package name to be used for go files generated from "f".
+// You might need to use a unique alias for the package when you import it. Use ReserveGoPackageAlias to get a unique alias.
+func (r *Registry) defaultGoPackageName(f *descriptor.FileDescriptorProto) string {
+ name := r.packageIdentityName(f)
+ return sanitizePackageName(name)
+}
+
+// packageIdentityName returns the identity of packages.
+// protoc-gen-grpc-gateway rejects CodeGeneratorRequests which contain more than one package,
+// as protoc-gen-go does.
+func (r *Registry) packageIdentityName(f *descriptor.FileDescriptorProto) string {
+ if f.Options != nil && f.Options.GoPackage != nil {
+ gopkg := f.Options.GetGoPackage()
+ idx := strings.LastIndex(gopkg, "/")
+ if idx >= 0 {
+ gopkg = gopkg[idx+1:]
+ }
+
+ // The package name is overridden with the string after the
+ // ';' character.
+ sc := strings.IndexByte(gopkg, ';')
+ if sc < 0 {
+ return sanitizePackageName(gopkg)
+ }
+
+ return sanitizePackageName(gopkg[sc+1:])
+ }
+ if p := r.importPath; len(p) != 0 {
+ if i := strings.LastIndex(p, "/"); i >= 0 {
+ p = p[i+1:]
+ }
+ return p
+ }
+
+ if f.Package == nil {
+ base := filepath.Base(f.GetName())
+ ext := filepath.Ext(base)
+ return strings.TrimSuffix(base, ext)
+ }
+ return f.GetPackage()
+}
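
For orientation (not part of the vendored sources), here is a minimal sketch of how a protoc plugin front end might feed a CodeGeneratorRequest into this Registry. Error handling and the actual code-generation step are elided, and the single option call simply mirrors the allow_repeated_fields_in_body option set in the Bazel compiler rule above.

    package main

    import (
        "io/ioutil"
        "log"
        "os"

        "github.com/golang/protobuf/proto"
        plugin "github.com/golang/protobuf/protoc-gen-go/plugin"
        "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
    )

    func main() {
        // protoc writes a serialized CodeGeneratorRequest to the plugin's stdin.
        input, err := ioutil.ReadAll(os.Stdin)
        if err != nil {
            log.Fatal(err)
        }
        var req plugin.CodeGeneratorRequest
        if err := proto.Unmarshal(input, &req); err != nil {
            log.Fatal(err)
        }

        reg := descriptor.NewRegistry()
        reg.SetAllowRepeatedFieldsInBody(true) // mirrors the Bazel compiler option above
        if err := reg.Load(&req); err != nil {
            log.Fatal(err)
        }

        // Once loaded, target files and their go packages can be resolved by name.
        for _, name := range req.FileToGenerate {
            f, err := reg.LookupFile(name)
            if err != nil {
                log.Fatal(err)
            }
            log.Printf("target %s -> go package %s", name, f.GoPkg.Path)
        }
    }
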
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services.go
new file mode 100644
index 0000000..8916d31
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services.go
@@ -0,0 +1,304 @@
+package descriptor
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/golang/glog"
+ "github.com/golang/protobuf/proto"
+ descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor"
+ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule"
+ options "google.golang.org/genproto/googleapis/api/annotations"
+)
+
+// loadServices registers services and their methods from "file" to "r".
+// It must be called after loadFile is called for all files so that loadServices
+// can resolve names of message types and their fields.
+func (r *Registry) loadServices(file *File) error {
+ glog.V(1).Infof("Loading services from %s", file.GetName())
+ var svcs []*Service
+ for _, sd := range file.GetService() {
+ glog.V(2).Infof("Registering %s", sd.GetName())
+ svc := &Service{
+ File: file,
+ ServiceDescriptorProto: sd,
+ }
+ for _, md := range sd.GetMethod() {
+ glog.V(2).Infof("Processing %s.%s", sd.GetName(), md.GetName())
+ opts, err := extractAPIOptions(md)
+ if err != nil {
+ glog.Errorf("Failed to extract HttpRule from %s.%s: %v", svc.GetName(), md.GetName(), err)
+ return err
+ }
+ optsList := r.LookupExternalHTTPRules((&Method{Service: svc, MethodDescriptorProto: md}).FQMN())
+ if opts != nil {
+ optsList = append(optsList, opts)
+ }
+ if len(optsList) == 0 {
+ glog.V(1).Infof("Found non-target method: %s.%s", svc.GetName(), md.GetName())
+ }
+ meth, err := r.newMethod(svc, md, optsList)
+ if err != nil {
+ return err
+ }
+ svc.Methods = append(svc.Methods, meth)
+ }
+ if len(svc.Methods) == 0 {
+ continue
+ }
+ glog.V(2).Infof("Registered %s with %d method(s)", svc.GetName(), len(svc.Methods))
+ svcs = append(svcs, svc)
+ }
+ file.Services = svcs
+ return nil
+}
+
+func (r *Registry) newMethod(svc *Service, md *descriptor.MethodDescriptorProto, optsList []*options.HttpRule) (*Method, error) {
+ requestType, err := r.LookupMsg(svc.File.GetPackage(), md.GetInputType())
+ if err != nil {
+ return nil, err
+ }
+ responseType, err := r.LookupMsg(svc.File.GetPackage(), md.GetOutputType())
+ if err != nil {
+ return nil, err
+ }
+ meth := &Method{
+ Service: svc,
+ MethodDescriptorProto: md,
+ RequestType: requestType,
+ ResponseType: responseType,
+ }
+
+ newBinding := func(opts *options.HttpRule, idx int) (*Binding, error) {
+ var (
+ httpMethod string
+ pathTemplate string
+ )
+ switch {
+ case opts.GetGet() != "":
+ httpMethod = "GET"
+ pathTemplate = opts.GetGet()
+ if opts.Body != "" {
+ return nil, fmt.Errorf("must not set request body when http method is GET: %s", md.GetName())
+ }
+
+ case opts.GetPut() != "":
+ httpMethod = "PUT"
+ pathTemplate = opts.GetPut()
+
+ case opts.GetPost() != "":
+ httpMethod = "POST"
+ pathTemplate = opts.GetPost()
+
+ case opts.GetDelete() != "":
+ httpMethod = "DELETE"
+ pathTemplate = opts.GetDelete()
+ if opts.Body != "" && !r.allowDeleteBody {
+ return nil, fmt.Errorf("must not set request body when http method is DELETE unless allow_delete_body option is true: %s", md.GetName())
+ }
+
+ case opts.GetPatch() != "":
+ httpMethod = "PATCH"
+ pathTemplate = opts.GetPatch()
+
+ case opts.GetCustom() != nil:
+ custom := opts.GetCustom()
+ httpMethod = custom.Kind
+ pathTemplate = custom.Path
+
+ default:
+ glog.V(1).Infof("No pattern specified in google.api.HttpRule: %s", md.GetName())
+ return nil, nil
+ }
+
+ parsed, err := httprule.Parse(pathTemplate)
+ if err != nil {
+ return nil, err
+ }
+ tmpl := parsed.Compile()
+
+ if md.GetClientStreaming() && len(tmpl.Fields) > 0 {
+ return nil, fmt.Errorf("cannot use path parameter in client streaming")
+ }
+
+ b := &Binding{
+ Method: meth,
+ Index: idx,
+ PathTmpl: tmpl,
+ HTTPMethod: httpMethod,
+ }
+
+ for _, f := range tmpl.Fields {
+ param, err := r.newParam(meth, f)
+ if err != nil {
+ return nil, err
+ }
+ b.PathParams = append(b.PathParams, param)
+ }
+
+ // TODO(yugui) Handle query params
+
+ b.Body, err = r.newBody(meth, opts.Body)
+ if err != nil {
+ return nil, err
+ }
+
+ b.ResponseBody, err = r.newResponse(meth, opts.ResponseBody)
+ if err != nil {
+ return nil, err
+ }
+
+ return b, nil
+ }
+
+ applyOpts := func(opts *options.HttpRule) error {
+ b, err := newBinding(opts, len(meth.Bindings))
+ if err != nil {
+ return err
+ }
+
+ if b != nil {
+ meth.Bindings = append(meth.Bindings, b)
+ }
+ for _, additional := range opts.GetAdditionalBindings() {
+ if len(additional.AdditionalBindings) > 0 {
+ return fmt.Errorf("additional_binding in additional_binding not allowed: %s.%s", svc.GetName(), meth.GetName())
+ }
+ b, err := newBinding(additional, len(meth.Bindings))
+ if err != nil {
+ return err
+ }
+ meth.Bindings = append(meth.Bindings, b)
+ }
+
+ return nil
+ }
+
+ for _, opts := range optsList {
+ if err := applyOpts(opts); err != nil {
+ return nil, err
+ }
+ }
+
+ return meth, nil
+}
+
+func extractAPIOptions(meth *descriptor.MethodDescriptorProto) (*options.HttpRule, error) {
+ if meth.Options == nil {
+ return nil, nil
+ }
+ if !proto.HasExtension(meth.Options, options.E_Http) {
+ return nil, nil
+ }
+ ext, err := proto.GetExtension(meth.Options, options.E_Http)
+ if err != nil {
+ return nil, err
+ }
+ opts, ok := ext.(*options.HttpRule)
+ if !ok {
+ return nil, fmt.Errorf("extension is %T; want an HttpRule", ext)
+ }
+ return opts, nil
+}
+
+func (r *Registry) newParam(meth *Method, path string) (Parameter, error) {
+ msg := meth.RequestType
+ fields, err := r.resolveFieldPath(msg, path, true)
+ if err != nil {
+ return Parameter{}, err
+ }
+ l := len(fields)
+ if l == 0 {
+ return Parameter{}, fmt.Errorf("invalid field access list for %s", path)
+ }
+ target := fields[l-1].Target
+ switch target.GetType() {
+ case descriptor.FieldDescriptorProto_TYPE_MESSAGE, descriptor.FieldDescriptorProto_TYPE_GROUP:
+ glog.V(2).Infoln("found aggregate type:", target, target.TypeName)
+ if IsWellKnownType(*target.TypeName) {
+ glog.V(2).Infoln("found well known aggregate type:", target)
+ } else {
+ return Parameter{}, fmt.Errorf("aggregate type %s in parameter of %s.%s: %s", target.Type, meth.Service.GetName(), meth.GetName(), path)
+ }
+ }
+ return Parameter{
+ FieldPath: FieldPath(fields),
+ Method: meth,
+ Target: fields[l-1].Target,
+ }, nil
+}
+
+func (r *Registry) newBody(meth *Method, path string) (*Body, error) {
+ msg := meth.RequestType
+ switch path {
+ case "":
+ return nil, nil
+ case "*":
+ return &Body{FieldPath: nil}, nil
+ }
+ fields, err := r.resolveFieldPath(msg, path, false)
+ if err != nil {
+ return nil, err
+ }
+ return &Body{FieldPath: FieldPath(fields)}, nil
+}
+
+func (r *Registry) newResponse(meth *Method, path string) (*Body, error) {
+ msg := meth.ResponseType
+ switch path {
+ case "", "*":
+ return nil, nil
+ }
+ fields, err := r.resolveFieldPath(msg, path, false)
+ if err != nil {
+ return nil, err
+ }
+ return &Body{FieldPath: FieldPath(fields)}, nil
+}
+
+// lookupField looks up a field named "name" within "msg".
+// It returns nil if no such field is found.
+func lookupField(msg *Message, name string) *Field {
+ for _, f := range msg.Fields {
+ if f.GetName() == name {
+ return f
+ }
+ }
+ return nil
+}
+
+// resolveFieldPath resolves "path" into a list of FieldPathComponent, starting from "msg".
+func (r *Registry) resolveFieldPath(msg *Message, path string, isPathParam bool) ([]FieldPathComponent, error) {
+ if path == "" {
+ return nil, nil
+ }
+
+ root := msg
+ var result []FieldPathComponent
+ for i, c := range strings.Split(path, ".") {
+ if i > 0 {
+ f := result[i-1].Target
+ switch f.GetType() {
+ case descriptor.FieldDescriptorProto_TYPE_MESSAGE, descriptor.FieldDescriptorProto_TYPE_GROUP:
+ var err error
+ msg, err = r.LookupMsg(msg.FQMN(), f.GetTypeName())
+ if err != nil {
+ return nil, err
+ }
+ default:
+ return nil, fmt.Errorf("not an aggregate type: %s in %s", f.GetName(), path)
+ }
+ }
+
+ glog.V(2).Infof("Lookup %s in %s", c, msg.FQMN())
+ f := lookupField(msg, c)
+ if f == nil {
+ return nil, fmt.Errorf("no field %q found in %s", path, root.GetName())
+ }
+ if !(isPathParam || r.allowRepeatedFieldsInBody) && f.GetLabel() == descriptor.FieldDescriptorProto_LABEL_REPEATED {
+ return nil, fmt.Errorf("repeated field not allowed in field path: %s in %s", f.GetName(), path)
+ }
+ result = append(result, FieldPathComponent{Name: c, Target: f})
+ }
+ return result, nil
+}
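
To illustrate the extension mechanism extractAPIOptions relies on (not part of the vendored sources), this sketch attaches a google.api.http rule to a MethodOptions value and reads it back. It assumes the pre-APIv2 golang/protobuf and genproto packages used throughout this vendor tree; the path is made up.

    package main

    import (
        "fmt"
        "log"

        "github.com/golang/protobuf/proto"
        protodesc "github.com/golang/protobuf/protoc-gen-go/descriptor"
        options "google.golang.org/genproto/googleapis/api/annotations"
    )

    func main() {
        // Attach a (made-up) google.api.http rule to MethodOptions...
        opts := &protodesc.MethodOptions{}
        rule := &options.HttpRule{
            Pattern: &options.HttpRule_Post{Post: "/v1/echo"},
            Body:    "*",
        }
        if err := proto.SetExtension(opts, options.E_Http, rule); err != nil {
            log.Fatal(err)
        }

        // ...and read it back, as extractAPIOptions does for real method descriptors.
        ext, err := proto.GetExtension(opts, options.E_Http)
        if err != nil {
            log.Fatal(err)
        }
        got := ext.(*options.HttpRule)
        fmt.Println(got.GetPost(), got.GetBody()) // /v1/echo *
    }
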
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types.go
new file mode 100644
index 0000000..4aa75f8
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types.go
@@ -0,0 +1,466 @@
+package descriptor
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/golang/protobuf/protoc-gen-go/descriptor"
+ gogen "github.com/golang/protobuf/protoc-gen-go/generator"
+ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule"
+)
+
+// IsWellKnownType returns true if the provided fully qualified type name is considered 'well-known'.
+func IsWellKnownType(typeName string) bool {
+ _, ok := wellKnownTypeConv[typeName]
+ return ok
+}
+
+// GoPackage represents a golang package
+type GoPackage struct {
+ // Path is the package path to the package.
+ Path string
+ // Name is the package name of the package
+ Name string
+ // Alias is an alias of the package, unique within the current invocation of the grpc-gateway generator.
+ Alias string
+}
+
+// Standard returns whether the import is a golang standard package.
+func (p GoPackage) Standard() bool {
+ return !strings.Contains(p.Path, ".")
+}
+
+// String returns a string representation of this package in the form of import line in golang.
+func (p GoPackage) String() string {
+ if p.Alias == "" {
+ return fmt.Sprintf("%q", p.Path)
+ }
+ return fmt.Sprintf("%s %q", p.Alias, p.Path)
+}
+
+// File wraps descriptor.FileDescriptorProto for richer features.
+type File struct {
+ *descriptor.FileDescriptorProto
+ // GoPkg is the go package of the go file generated from this file.
+ GoPkg GoPackage
+ // Messages is the list of messages defined in this file.
+ Messages []*Message
+ // Enums is the list of enums defined in this file.
+ Enums []*Enum
+ // Services is the list of services defined in this file.
+ Services []*Service
+}
+
+// proto2 determines if the syntax of the file is proto2.
+func (f *File) proto2() bool {
+ return f.Syntax == nil || f.GetSyntax() == "proto2"
+}
+
+// Message describes a protocol buffer message type
+type Message struct {
+ // File is the file where the message is defined
+ File *File
+ // Outers is a list of outer messages if this message is a nested type.
+ Outers []string
+ *descriptor.DescriptorProto
+ Fields []*Field
+
+ // Index is proto path index of this message in File.
+ Index int
+}
+
+// FQMN returns a fully qualified message name of this message.
+func (m *Message) FQMN() string {
+ components := []string{""}
+ if m.File.Package != nil {
+ components = append(components, m.File.GetPackage())
+ }
+ components = append(components, m.Outers...)
+ components = append(components, m.GetName())
+ return strings.Join(components, ".")
+}
+
+// GoType returns a go type name for the message type.
+// It prefixes the type name with the package alias if
+// the package it belongs to is not "currentPackage".
+func (m *Message) GoType(currentPackage string) string {
+ var components []string
+ components = append(components, m.Outers...)
+ components = append(components, m.GetName())
+
+ name := strings.Join(components, "_")
+ if m.File.GoPkg.Path == currentPackage {
+ return name
+ }
+ pkg := m.File.GoPkg.Name
+ if alias := m.File.GoPkg.Alias; alias != "" {
+ pkg = alias
+ }
+ return fmt.Sprintf("%s.%s", pkg, name)
+}
+
+// Enum describes a protocol buffer enum type
+type Enum struct {
+ // File is the file where the enum is defined
+ File *File
+ // Outers is a list of outer messages if this enum is a nested type.
+ Outers []string
+ *descriptor.EnumDescriptorProto
+
+ Index int
+}
+
+// FQEN returns a fully qualified enum name of this enum.
+func (e *Enum) FQEN() string {
+ components := []string{""}
+ if e.File.Package != nil {
+ components = append(components, e.File.GetPackage())
+ }
+ components = append(components, e.Outers...)
+ components = append(components, e.GetName())
+ return strings.Join(components, ".")
+}
+
+// GoType returns a go type name for the enum type.
+// It prefixes the type name with the package alias if
+// the package it belongs to is not "currentPackage".
+func (e *Enum) GoType(currentPackage string) string {
+ var components []string
+ components = append(components, e.Outers...)
+ components = append(components, e.GetName())
+
+ name := strings.Join(components, "_")
+ if e.File.GoPkg.Path == currentPackage {
+ return name
+ }
+ pkg := e.File.GoPkg.Name
+ if alias := e.File.GoPkg.Alias; alias != "" {
+ pkg = alias
+ }
+ return fmt.Sprintf("%s.%s", pkg, name)
+}
+
+// Service wraps descriptor.ServiceDescriptorProto for richer features.
+type Service struct {
+ // File is the file where this service is defined.
+ File *File
+ *descriptor.ServiceDescriptorProto
+ // Methods is the list of methods defined in this service.
+ Methods []*Method
+}
+
+// FQSN returns the fully qualified service name of this service.
+func (s *Service) FQSN() string {
+ components := []string{""}
+ if s.File.Package != nil {
+ components = append(components, s.File.GetPackage())
+ }
+ components = append(components, s.GetName())
+ return strings.Join(components, ".")
+}
+
+// Method wraps descriptor.MethodDescriptorProto for richer features.
+type Method struct {
+ // Service is the service which this method belongs to.
+ Service *Service
+ *descriptor.MethodDescriptorProto
+
+ // RequestType is the message type of requests to this method.
+ RequestType *Message
+ // ResponseType is the message type of responses from this method.
+ ResponseType *Message
+ Bindings []*Binding
+}
+
+// FQMN returns a fully qualified rpc method name of this method.
+func (m *Method) FQMN() string {
+ components := []string{}
+ components = append(components, m.Service.FQSN())
+ components = append(components, m.GetName())
+ return strings.Join(components, ".")
+}
+
+// Binding describes how an HTTP endpoint is bound to a gRPC method.
+type Binding struct {
+ // Method is the method which the endpoint is bound to.
+ Method *Method
+ // Index is a zero-origin index of the binding in the target method
+ Index int
+ // PathTmpl is the path template this method is mapped to.
+ PathTmpl httprule.Template
+ // HTTPMethod is the HTTP method which this method is mapped to.
+ HTTPMethod string
+ // PathParams is the list of parameters provided in HTTP request paths.
+ PathParams []Parameter
+ // Body describes parameters provided in HTTP request body.
+ Body *Body
+ // ResponseBody describes field in response struct to marshal in HTTP response body.
+ ResponseBody *Body
+}
+
+// ExplicitParams returns a list of explicitly bound parameters of "b",
+// i.e. a union of field path for body and field paths for path parameters.
+func (b *Binding) ExplicitParams() []string {
+ var result []string
+ if b.Body != nil {
+ result = append(result, b.Body.FieldPath.String())
+ }
+ for _, p := range b.PathParams {
+ result = append(result, p.FieldPath.String())
+ }
+ return result
+}
+
+// Field wraps descriptor.FieldDescriptorProto for richer features.
+type Field struct {
+ // Message is the message type which this field belongs to.
+ Message *Message
+ // FieldMessage is the message type of the field.
+ FieldMessage *Message
+ *descriptor.FieldDescriptorProto
+}
+
+// Parameter is a parameter provided in http requests
+type Parameter struct {
+ // FieldPath is a path to a proto field which this parameter is mapped to.
+ FieldPath
+ // Target is the proto field which this parameter is mapped to.
+ Target *Field
+ // Method is the method which this parameter is used for.
+ Method *Method
+}
+
+// ConvertFuncExpr returns a go expression of a converter function.
+// The converter function converts a string into a value for the parameter.
+func (p Parameter) ConvertFuncExpr() (string, error) {
+ tbl := proto3ConvertFuncs
+ if !p.IsProto2() && p.IsRepeated() {
+ tbl = proto3RepeatedConvertFuncs
+ } else if p.IsProto2() && !p.IsRepeated() {
+ tbl = proto2ConvertFuncs
+ } else if p.IsProto2() && p.IsRepeated() {
+ tbl = proto2RepeatedConvertFuncs
+ }
+ typ := p.Target.GetType()
+ conv, ok := tbl[typ]
+ if !ok {
+ conv, ok = wellKnownTypeConv[p.Target.GetTypeName()]
+ }
+ if !ok {
+ return "", fmt.Errorf("unsupported field type %s of parameter %s in %s.%s", typ, p.FieldPath, p.Method.Service.GetName(), p.Method.GetName())
+ }
+ return conv, nil
+}
+
+// IsEnum returns true if the field is an enum type, otherwise false is returned.
+func (p Parameter) IsEnum() bool {
+ return p.Target.GetType() == descriptor.FieldDescriptorProto_TYPE_ENUM
+}
+
+// IsRepeated returns true if the field is repeated, otherwise false is returned.
+func (p Parameter) IsRepeated() bool {
+ return p.Target.GetLabel() == descriptor.FieldDescriptorProto_LABEL_REPEATED
+}
+
+// IsProto2 returns true if the field is proto2, otherwise false is returned.
+func (p Parameter) IsProto2() bool {
+ return p.Target.Message.File.proto2()
+}
+
+// Body describes a http (request|response) body to be sent to the (method|client).
+// This is used in body and response_body options in google.api.HttpRule
+type Body struct {
+ // FieldPath is a path to a proto field which the (request|response) body is mapped to.
+ // The (request|response) body is mapped to the (request|response) type itself if FieldPath is empty.
+ FieldPath FieldPath
+}
+
+// AssignableExpr returns an assignable expression in Go to be used to initialize method request object.
+// It starts with "msgExpr", which is the go expression of the method request object.
+func (b Body) AssignableExpr(msgExpr string) string {
+ return b.FieldPath.AssignableExpr(msgExpr)
+}
+
+// FieldPath is a path to a field from a request message.
+type FieldPath []FieldPathComponent
+
+// String returns a string representation of the field path.
+func (p FieldPath) String() string {
+ var components []string
+ for _, c := range p {
+ components = append(components, c.Name)
+ }
+ return strings.Join(components, ".")
+}
+
+// IsNestedProto3 indicates whether the FieldPath is a nested Proto3 path.
+func (p FieldPath) IsNestedProto3() bool {
+ if len(p) > 1 && !p[0].Target.Message.File.proto2() {
+ return true
+ }
+ return false
+}
+
+// AssignableExpr is an assignable expression in Go to be used to assign a value to the target field.
+// It starts with "msgExpr", which is the go expression of the method request object.
+func (p FieldPath) AssignableExpr(msgExpr string) string {
+ l := len(p)
+ if l == 0 {
+ return msgExpr
+ }
+
+ var preparations []string
+ components := msgExpr
+ for i, c := range p {
+ // Check if it is a oneOf field.
+ if c.Target.OneofIndex != nil {
+ index := c.Target.OneofIndex
+ msg := c.Target.Message
+ oneOfName := gogen.CamelCase(msg.GetOneofDecl()[*index].GetName())
+ oneofFieldName := msg.GetName() + "_" + c.AssignableExpr()
+
+ components = components + "." + oneOfName
+ s := `if %s == nil {
+ %s =&%s{}
+ } else if _, ok := %s.(*%s); !ok {
+ return nil, metadata, grpc.Errorf(codes.InvalidArgument, "expect type: *%s, but: %%t\n",%s)
+ }`
+
+ preparations = append(preparations, fmt.Sprintf(s, components, components, oneofFieldName, components, oneofFieldName, oneofFieldName, components))
+ components = components + ".(*" + oneofFieldName + ")"
+ }
+
+ if i == l-1 {
+ components = components + "." + c.AssignableExpr()
+ continue
+ }
+ components = components + "." + c.ValueExpr()
+ }
+
+ preparations = append(preparations, components)
+ return strings.Join(preparations, "\n")
+}
+
+// FieldPathComponent is a path component in FieldPath
+type FieldPathComponent struct {
+ // Name is a name of the proto field which this component corresponds to.
+ // TODO(yugui) is this necessary?
+ Name string
+ // Target is the proto field which this component corresponds to.
+ Target *Field
+}
+
+// AssignableExpr returns an assignable expression in go for this field.
+func (c FieldPathComponent) AssignableExpr() string {
+ return gogen.CamelCase(c.Name)
+}
+
+// ValueExpr returns an expression in go for this field.
+func (c FieldPathComponent) ValueExpr() string {
+ if c.Target.Message.File.proto2() {
+ return fmt.Sprintf("Get%s()", gogen.CamelCase(c.Name))
+ }
+ return gogen.CamelCase(c.Name)
+}
+
+var (
+ proto3ConvertFuncs = map[descriptor.FieldDescriptorProto_Type]string{
+ descriptor.FieldDescriptorProto_TYPE_DOUBLE: "runtime.Float64",
+ descriptor.FieldDescriptorProto_TYPE_FLOAT: "runtime.Float32",
+ descriptor.FieldDescriptorProto_TYPE_INT64: "runtime.Int64",
+ descriptor.FieldDescriptorProto_TYPE_UINT64: "runtime.Uint64",
+ descriptor.FieldDescriptorProto_TYPE_INT32: "runtime.Int32",
+ descriptor.FieldDescriptorProto_TYPE_FIXED64: "runtime.Uint64",
+ descriptor.FieldDescriptorProto_TYPE_FIXED32: "runtime.Uint32",
+ descriptor.FieldDescriptorProto_TYPE_BOOL: "runtime.Bool",
+ descriptor.FieldDescriptorProto_TYPE_STRING: "runtime.String",
+ // FieldDescriptorProto_TYPE_GROUP
+ // FieldDescriptorProto_TYPE_MESSAGE
+ descriptor.FieldDescriptorProto_TYPE_BYTES: "runtime.Bytes",
+ descriptor.FieldDescriptorProto_TYPE_UINT32: "runtime.Uint32",
+ descriptor.FieldDescriptorProto_TYPE_ENUM: "runtime.Enum",
+ descriptor.FieldDescriptorProto_TYPE_SFIXED32: "runtime.Int32",
+ descriptor.FieldDescriptorProto_TYPE_SFIXED64: "runtime.Int64",
+ descriptor.FieldDescriptorProto_TYPE_SINT32: "runtime.Int32",
+ descriptor.FieldDescriptorProto_TYPE_SINT64: "runtime.Int64",
+ }
+
+ proto3RepeatedConvertFuncs = map[descriptor.FieldDescriptorProto_Type]string{
+ descriptor.FieldDescriptorProto_TYPE_DOUBLE: "runtime.Float64Slice",
+ descriptor.FieldDescriptorProto_TYPE_FLOAT: "runtime.Float32Slice",
+ descriptor.FieldDescriptorProto_TYPE_INT64: "runtime.Int64Slice",
+ descriptor.FieldDescriptorProto_TYPE_UINT64: "runtime.Uint64Slice",
+ descriptor.FieldDescriptorProto_TYPE_INT32: "runtime.Int32Slice",
+ descriptor.FieldDescriptorProto_TYPE_FIXED64: "runtime.Uint64Slice",
+ descriptor.FieldDescriptorProto_TYPE_FIXED32: "runtime.Uint32Slice",
+ descriptor.FieldDescriptorProto_TYPE_BOOL: "runtime.BoolSlice",
+ descriptor.FieldDescriptorProto_TYPE_STRING: "runtime.StringSlice",
+ // FieldDescriptorProto_TYPE_GROUP
+ // FieldDescriptorProto_TYPE_MESSAGE
+ descriptor.FieldDescriptorProto_TYPE_BYTES: "runtime.BytesSlice",
+ descriptor.FieldDescriptorProto_TYPE_UINT32: "runtime.Uint32Slice",
+ descriptor.FieldDescriptorProto_TYPE_ENUM: "runtime.EnumSlice",
+ descriptor.FieldDescriptorProto_TYPE_SFIXED32: "runtime.Int32Slice",
+ descriptor.FieldDescriptorProto_TYPE_SFIXED64: "runtime.Int64Slice",
+ descriptor.FieldDescriptorProto_TYPE_SINT32: "runtime.Int32Slice",
+ descriptor.FieldDescriptorProto_TYPE_SINT64: "runtime.Int64Slice",
+ }
+
+ proto2ConvertFuncs = map[descriptor.FieldDescriptorProto_Type]string{
+ descriptor.FieldDescriptorProto_TYPE_DOUBLE: "runtime.Float64P",
+ descriptor.FieldDescriptorProto_TYPE_FLOAT: "runtime.Float32P",
+ descriptor.FieldDescriptorProto_TYPE_INT64: "runtime.Int64P",
+ descriptor.FieldDescriptorProto_TYPE_UINT64: "runtime.Uint64P",
+ descriptor.FieldDescriptorProto_TYPE_INT32: "runtime.Int32P",
+ descriptor.FieldDescriptorProto_TYPE_FIXED64: "runtime.Uint64P",
+ descriptor.FieldDescriptorProto_TYPE_FIXED32: "runtime.Uint32P",
+ descriptor.FieldDescriptorProto_TYPE_BOOL: "runtime.BoolP",
+ descriptor.FieldDescriptorProto_TYPE_STRING: "runtime.StringP",
+ // FieldDescriptorProto_TYPE_GROUP
+ // FieldDescriptorProto_TYPE_MESSAGE
+ // FieldDescriptorProto_TYPE_BYTES
+ // TODO(yugui) Handle bytes
+ descriptor.FieldDescriptorProto_TYPE_UINT32: "runtime.Uint32P",
+ descriptor.FieldDescriptorProto_TYPE_ENUM: "runtime.EnumP",
+ descriptor.FieldDescriptorProto_TYPE_SFIXED32: "runtime.Int32P",
+ descriptor.FieldDescriptorProto_TYPE_SFIXED64: "runtime.Int64P",
+ descriptor.FieldDescriptorProto_TYPE_SINT32: "runtime.Int32P",
+ descriptor.FieldDescriptorProto_TYPE_SINT64: "runtime.Int64P",
+ }
+
+ proto2RepeatedConvertFuncs = map[descriptor.FieldDescriptorProto_Type]string{
+ descriptor.FieldDescriptorProto_TYPE_DOUBLE: "runtime.Float64Slice",
+ descriptor.FieldDescriptorProto_TYPE_FLOAT: "runtime.Float32Slice",
+ descriptor.FieldDescriptorProto_TYPE_INT64: "runtime.Int64Slice",
+ descriptor.FieldDescriptorProto_TYPE_UINT64: "runtime.Uint64Slice",
+ descriptor.FieldDescriptorProto_TYPE_INT32: "runtime.Int32Slice",
+ descriptor.FieldDescriptorProto_TYPE_FIXED64: "runtime.Uint64Slice",
+ descriptor.FieldDescriptorProto_TYPE_FIXED32: "runtime.Uint32Slice",
+ descriptor.FieldDescriptorProto_TYPE_BOOL: "runtime.BoolSlice",
+ descriptor.FieldDescriptorProto_TYPE_STRING: "runtime.StringSlice",
+ // FieldDescriptorProto_TYPE_GROUP
+ // FieldDescriptorProto_TYPE_MESSAGE
+ // FieldDescriptorProto_TYPE_BYTES
+ // TODO(maros7) Handle bytes
+ descriptor.FieldDescriptorProto_TYPE_UINT32: "runtime.Uint32Slice",
+ descriptor.FieldDescriptorProto_TYPE_ENUM: "runtime.EnumSlice",
+ descriptor.FieldDescriptorProto_TYPE_SFIXED32: "runtime.Int32Slice",
+ descriptor.FieldDescriptorProto_TYPE_SFIXED64: "runtime.Int64Slice",
+ descriptor.FieldDescriptorProto_TYPE_SINT32: "runtime.Int32Slice",
+ descriptor.FieldDescriptorProto_TYPE_SINT64: "runtime.Int64Slice",
+ }
+
+ wellKnownTypeConv = map[string]string{
+ ".google.protobuf.Timestamp": "runtime.Timestamp",
+ ".google.protobuf.Duration": "runtime.Duration",
+ ".google.protobuf.StringValue": "runtime.StringValue",
+ ".google.protobuf.FloatValue": "runtime.FloatValue",
+ ".google.protobuf.DoubleValue": "runtime.DoubleValue",
+ ".google.protobuf.BoolValue": "runtime.BoolValue",
+ ".google.protobuf.BytesValue": "runtime.BytesValue",
+ ".google.protobuf.Int32Value": "runtime.Int32Value",
+ ".google.protobuf.UInt32Value": "runtime.UInt32Value",
+ ".google.protobuf.Int64Value": "runtime.Int64Value",
+ ".google.protobuf.UInt64Value": "runtime.UInt64Value",
+ }
+)
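
A small sketch (not part of the vendored sources) exercising the exported helpers above; the package paths and type names are arbitrary examples.

    package main

    import (
        "fmt"

        "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
    )

    func main() {
        std := descriptor.GoPackage{Path: "net/http", Name: "http"}
        ext := descriptor.GoPackage{Path: "github.com/example/echo", Name: "echo", Alias: "echo_0"}

        fmt.Println(std.Standard(), std.String()) // true "net/http"
        fmt.Println(ext.Standard(), ext.String()) // false echo_0 "github.com/example/echo"

        fmt.Println(descriptor.IsWellKnownType(".google.protobuf.Timestamp")) // true
        fmt.Println(descriptor.IsWellKnownType(".example.v1.Echo"))           // false
    }
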
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator/BUILD.bazel
new file mode 100644
index 0000000..6cb2162
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator/BUILD.bazel
@@ -0,0 +1,13 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library")
+
+package(default_visibility = ["//:generators"])
+
+go_library(
+ name = "go_default_library",
+ srcs = ["generator.go"],
+ importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator",
+ deps = [
+ "//protoc-gen-grpc-gateway/descriptor:go_default_library",
+ "@io_bazel_rules_go//proto/wkt:compiler_plugin_go_proto",
+ ],
+)
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator/generator.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator/generator.go
new file mode 100644
index 0000000..df55da4
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator/generator.go
@@ -0,0 +1,13 @@
+// Package generator provides an abstract interface to code generators.
+package generator
+
+import (
+ plugin "github.com/golang/protobuf/protoc-gen-go/plugin"
+ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
+)
+
+// Generator is an abstraction of code generators.
+type Generator interface {
+ // Generate generates output files from input .proto files.
+ Generate(targets []*descriptor.File) ([]*plugin.CodeGeneratorResponse_File, error)
+}
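
A minimal, hypothetical implementation of this interface (not part of the vendored sources), showing only the shape a concrete generator has to satisfy; the package and file names are illustrative.

    package stubgen

    import (
        "github.com/golang/protobuf/proto"
        plugin "github.com/golang/protobuf/protoc-gen-go/plugin"
        "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
        gen "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator"
    )

    // stubGenerator emits one fixed placeholder file per target; it exists only
    // to show the shape of the Generator contract.
    type stubGenerator struct{}

    func (stubGenerator) Generate(targets []*descriptor.File) ([]*plugin.CodeGeneratorResponse_File, error) {
        var files []*plugin.CodeGeneratorResponse_File
        for _, f := range targets {
            files = append(files, &plugin.CodeGeneratorResponse_File{
                Name:    proto.String(f.GetName() + ".stub.txt"),
                Content: proto.String("// generated for " + f.GetName()),
            })
        }
        return files, nil
    }

    // Compile-time check that stubGenerator satisfies the interface.
    var _ gen.Generator = stubGenerator{}
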
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/BUILD.bazel
new file mode 100644
index 0000000..316010f
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/BUILD.bazel
@@ -0,0 +1,38 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
+
+package(default_visibility = ["//protoc-gen-grpc-gateway:__subpackages__"])
+
+go_library(
+ name = "go_default_library",
+ srcs = [
+ "doc.go",
+ "generator.go",
+ "template.go",
+ ],
+ importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway",
+ deps = [
+ "//protoc-gen-grpc-gateway/descriptor:go_default_library",
+ "//protoc-gen-grpc-gateway/generator:go_default_library",
+ "//utilities:go_default_library",
+ "@com_github_golang_glog//:go_default_library",
+ "@com_github_golang_protobuf//proto:go_default_library",
+ "@com_github_golang_protobuf//protoc-gen-go/generator:go_default_library_gen",
+ "@io_bazel_rules_go//proto/wkt:compiler_plugin_go_proto",
+ ],
+)
+
+go_test(
+ name = "go_default_test",
+ size = "small",
+ srcs = [
+ "generator_test.go",
+ "template_test.go",
+ ],
+ embed = [":go_default_library"],
+ deps = [
+ "//protoc-gen-grpc-gateway/descriptor:go_default_library",
+ "//protoc-gen-grpc-gateway/httprule:go_default_library",
+ "@com_github_golang_protobuf//proto:go_default_library",
+ "@io_bazel_rules_go//proto/wkt:descriptor_go_proto",
+ ],
+)
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/doc.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/doc.go
new file mode 100644
index 0000000..223d810
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/doc.go
@@ -0,0 +1,2 @@
+// Package gengateway provides a code generator for grpc gateway files.
+package gengateway
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/generator.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/generator.go
new file mode 100644
index 0000000..0b6bfbd
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/generator.go
@@ -0,0 +1,171 @@
+package gengateway
+
+import (
+ "errors"
+ "fmt"
+ "go/format"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "github.com/golang/glog"
+ "github.com/golang/protobuf/proto"
+ plugin "github.com/golang/protobuf/protoc-gen-go/plugin"
+ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
+ gen "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator"
+)
+
+var (
+ errNoTargetService = errors.New("no target service defined in the file")
+)
+
+type pathType int
+
+const (
+ pathTypeImport pathType = iota
+ pathTypeSourceRelative
+)
+
+type generator struct {
+ reg *descriptor.Registry
+ baseImports []descriptor.GoPackage
+ useRequestContext bool
+ registerFuncSuffix string
+ pathType pathType
+ allowPatchFeature bool
+}
+
+// New returns a new generator which generates grpc gateway files.
+func New(reg *descriptor.Registry, useRequestContext bool, registerFuncSuffix, pathTypeString string, allowPatchFeature bool) gen.Generator {
+ var imports []descriptor.GoPackage
+ for _, pkgpath := range []string{
+ "context",
+ "io",
+ "net/http",
+ "github.com/grpc-ecosystem/grpc-gateway/runtime",
+ "github.com/grpc-ecosystem/grpc-gateway/utilities",
+ "github.com/golang/protobuf/descriptor",
+ "github.com/golang/protobuf/proto",
+ "google.golang.org/grpc",
+ "google.golang.org/grpc/codes",
+ "google.golang.org/grpc/grpclog",
+ "google.golang.org/grpc/status",
+ } {
+ pkg := descriptor.GoPackage{
+ Path: pkgpath,
+ Name: path.Base(pkgpath),
+ }
+ if err := reg.ReserveGoPackageAlias(pkg.Name, pkg.Path); err != nil {
+ for i := 0; ; i++ {
+ alias := fmt.Sprintf("%s_%d", pkg.Name, i)
+ if err := reg.ReserveGoPackageAlias(alias, pkg.Path); err != nil {
+ continue
+ }
+ pkg.Alias = alias
+ break
+ }
+ }
+ imports = append(imports, pkg)
+ }
+
+ var pathType pathType
+ switch pathTypeString {
+ case "", "import":
+ // paths=import is default
+ case "source_relative":
+ pathType = pathTypeSourceRelative
+ default:
+ glog.Fatalf(`Unknown path type %q: want "import" or "source_relative".`, pathTypeString)
+ }
+
+ return &generator{
+ reg: reg,
+ baseImports: imports,
+ useRequestContext: useRequestContext,
+ registerFuncSuffix: registerFuncSuffix,
+ pathType: pathType,
+ allowPatchFeature: allowPatchFeature,
+ }
+}
+
+func (g *generator) Generate(targets []*descriptor.File) ([]*plugin.CodeGeneratorResponse_File, error) {
+ var files []*plugin.CodeGeneratorResponse_File
+ for _, file := range targets {
+ glog.V(1).Infof("Processing %s", file.GetName())
+ code, err := g.generate(file)
+ if err == errNoTargetService {
+ glog.V(1).Infof("%s: %v", file.GetName(), err)
+ continue
+ }
+ if err != nil {
+ return nil, err
+ }
+ formatted, err := format.Source([]byte(code))
+ if err != nil {
+ glog.Errorf("%v: %s", err, code)
+ return nil, err
+ }
+ name := file.GetName()
+ if g.pathType == pathTypeImport && file.GoPkg.Path != "" {
+ name = fmt.Sprintf("%s/%s", file.GoPkg.Path, filepath.Base(name))
+ }
+ ext := filepath.Ext(name)
+ base := strings.TrimSuffix(name, ext)
+ output := fmt.Sprintf("%s.pb.gw.go", base)
+ files = append(files, &plugin.CodeGeneratorResponse_File{
+ Name: proto.String(output),
+ Content: proto.String(string(formatted)),
+ })
+ glog.V(1).Infof("Will emit %s", output)
+ }
+ return files, nil
+}
+
+func (g *generator) generate(file *descriptor.File) (string, error) {
+ pkgSeen := make(map[string]bool)
+ var imports []descriptor.GoPackage
+ for _, pkg := range g.baseImports {
+ pkgSeen[pkg.Path] = true
+ imports = append(imports, pkg)
+ }
+ for _, svc := range file.Services {
+ for _, m := range svc.Methods {
+ imports = append(imports, g.addEnumPathParamImports(file, m, pkgSeen)...)
+ pkg := m.RequestType.File.GoPkg
+ if len(m.Bindings) == 0 ||
+ pkg == file.GoPkg || pkgSeen[pkg.Path] {
+ continue
+ }
+ pkgSeen[pkg.Path] = true
+ imports = append(imports, pkg)
+ }
+ }
+ params := param{
+ File: file,
+ Imports: imports,
+ UseRequestContext: g.useRequestContext,
+ RegisterFuncSuffix: g.registerFuncSuffix,
+ AllowPatchFeature: g.allowPatchFeature,
+ }
+ return applyTemplate(params, g.reg)
+}
+
+// addEnumPathParamImports adds imports for the Go packages of enum types referenced by path parameters.
+func (g *generator) addEnumPathParamImports(file *descriptor.File, m *descriptor.Method, pkgSeen map[string]bool) []descriptor.GoPackage {
+ var imports []descriptor.GoPackage
+ for _, b := range m.Bindings {
+ for _, p := range b.PathParams {
+ e, err := g.reg.LookupEnum("", p.Target.GetTypeName())
+ if err != nil {
+ continue
+ }
+ pkg := e.File.GoPkg
+ if pkg == file.GoPkg || pkgSeen[pkg.Path] {
+ continue
+ }
+ pkgSeen[pkg.Path] = true
+ imports = append(imports, pkg)
+ }
+ }
+ return imports
+}
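
Note on the output naming in Generate above: one <base>.pb.gw.go file is emitted per target proto, and the "paths" option only changes the directory part of that name. The following standalone sketch mirrors that rule; it is not part of the vendored code, and the proto path and Go package used below are made up for illustration.

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// outputName mirrors the naming logic of (*generator).Generate: with
// paths=import the file is placed under the Go package path, with
// paths=source_relative it stays next to the source .proto file.
func outputName(protoName, goPkgPath string, sourceRelative bool) string {
	name := protoName
	if !sourceRelative && goPkgPath != "" {
		name = fmt.Sprintf("%s/%s", goPkgPath, filepath.Base(name))
	}
	base := strings.TrimSuffix(name, filepath.Ext(name))
	return base + ".pb.gw.go"
}

func main() {
	fmt.Println(outputName("example/echo.proto", "github.com/example/echogw", false)) // github.com/example/echogw/echo.pb.gw.go
	fmt.Println(outputName("example/echo.proto", "github.com/example/echogw", true))  // example/echo.pb.gw.go
}
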
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/template.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/template.go
new file mode 100644
index 0000000..1d3d3ca
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/template.go
@@ -0,0 +1,715 @@
+package gengateway
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "strings"
+ "text/template"
+
+ "github.com/golang/glog"
+ generator2 "github.com/golang/protobuf/protoc-gen-go/generator"
+ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
+ "github.com/grpc-ecosystem/grpc-gateway/utilities"
+)
+
+type param struct {
+ *descriptor.File
+ Imports []descriptor.GoPackage
+ UseRequestContext bool
+ RegisterFuncSuffix string
+ AllowPatchFeature bool
+}
+
+type binding struct {
+ *descriptor.Binding
+ Registry *descriptor.Registry
+ AllowPatchFeature bool
+}
+
+// GetBodyFieldPath returns the binding body's fieldpath.
+func (b binding) GetBodyFieldPath() string {
+ if b.Body != nil && len(b.Body.FieldPath) != 0 {
+ return b.Body.FieldPath.String()
+ }
+ return "*"
+}
+
+// GetBodyFieldStructName returns the binding body's struct field name.
+func (b binding) GetBodyFieldStructName() (string, error) {
+ if b.Body != nil && len(b.Body.FieldPath) != 0 {
+ return generator2.CamelCase(b.Body.FieldPath.String()), nil
+ }
+ return "", errors.New("No body field found")
+}
+
+// HasQueryParam determines if the binding needs parameters in the query string.
+//
+// It sometimes returns true even though the binding does not actually need them,
+// but that is harmless: it only results in a small amount of extra generated code.
+func (b binding) HasQueryParam() bool {
+ if b.Body != nil && len(b.Body.FieldPath) == 0 {
+ return false
+ }
+ fields := make(map[string]bool)
+ for _, f := range b.Method.RequestType.Fields {
+ fields[f.GetName()] = true
+ }
+ if b.Body != nil {
+ delete(fields, b.Body.FieldPath.String())
+ }
+ for _, p := range b.PathParams {
+ delete(fields, p.FieldPath.String())
+ }
+ return len(fields) > 0
+}
+
+func (b binding) QueryParamFilter() queryParamFilter {
+ var seqs [][]string
+ if b.Body != nil {
+ seqs = append(seqs, strings.Split(b.Body.FieldPath.String(), "."))
+ }
+ for _, p := range b.PathParams {
+ seqs = append(seqs, strings.Split(p.FieldPath.String(), "."))
+ }
+ return queryParamFilter{utilities.NewDoubleArray(seqs)}
+}
+
+// HasEnumPathParam returns true if the path parameters contain a parameter
+// that maps to a non-repeated enum proto field, and false otherwise.
+func (b binding) HasEnumPathParam() bool {
+ return b.hasEnumPathParam(false)
+}
+
+// HasRepeatedEnumPathParam returns true if the path parameters contain a parameter
+// that maps to a repeated enum proto field, and false otherwise.
+func (b binding) HasRepeatedEnumPathParam() bool {
+ return b.hasEnumPathParam(true)
+}
+
+// hasEnumPathParam returns true if the path parameters contain a parameter
+// that maps to an enum proto field whose repeatedness matches the provided
+// 'repeated' argument.
+func (b binding) hasEnumPathParam(repeated bool) bool {
+ for _, p := range b.PathParams {
+ if p.IsEnum() && p.IsRepeated() == repeated {
+ return true
+ }
+ }
+ return false
+}
+
+// LookupEnum looks up an enum type by path parameter.
+func (b binding) LookupEnum(p descriptor.Parameter) *descriptor.Enum {
+ e, err := b.Registry.LookupEnum("", p.Target.GetTypeName())
+ if err != nil {
+ return nil
+ }
+ return e
+}
+
+// FieldMaskField returns the golang-style name of the variable for a FieldMask, if there is exactly one of that type in
+// the message. Otherwise, it returns an empty string.
+func (b binding) FieldMaskField() string {
+ var fieldMaskField *descriptor.Field
+ for _, f := range b.Method.RequestType.Fields {
+ if f.GetTypeName() == ".google.protobuf.FieldMask" {
+ // if there is more than 1 FieldMask for this request, then return none
+ if fieldMaskField != nil {
+ return ""
+ }
+ fieldMaskField = f
+ }
+ }
+ if fieldMaskField != nil {
+ return generator2.CamelCase(fieldMaskField.GetName())
+ }
+ return ""
+}
+
+// queryParamFilter is a wrapper of utilities.DoubleArray which provides String() to output DoubleArray.Encoding in a stable and predictable format.
+type queryParamFilter struct {
+ *utilities.DoubleArray
+}
+
+func (f queryParamFilter) String() string {
+ encodings := make([]string, len(f.Encoding))
+ for str, enc := range f.Encoding {
+ encodings[enc] = fmt.Sprintf("%q: %d", str, enc)
+ }
+ e := strings.Join(encodings, ", ")
+ return fmt.Sprintf("&utilities.DoubleArray{Encoding: map[string]int{%s}, Base: %#v, Check: %#v}", e, f.Base, f.Check)
+}
+
+type trailerParams struct {
+ Services []*descriptor.Service
+ UseRequestContext bool
+ RegisterFuncSuffix string
+ AssumeColonVerb bool
+}
+
+func applyTemplate(p param, reg *descriptor.Registry) (string, error) {
+ w := bytes.NewBuffer(nil)
+ if err := headerTemplate.Execute(w, p); err != nil {
+ return "", err
+ }
+ var targetServices []*descriptor.Service
+
+ for _, msg := range p.Messages {
+ msgName := generator2.CamelCase(*msg.Name)
+ msg.Name = &msgName
+ }
+ for _, svc := range p.Services {
+ var methodWithBindingsSeen bool
+ svcName := generator2.CamelCase(*svc.Name)
+ svc.Name = &svcName
+ for _, meth := range svc.Methods {
+ glog.V(2).Infof("Processing %s.%s", svc.GetName(), meth.GetName())
+ methName := generator2.CamelCase(*meth.Name)
+ meth.Name = &methName
+ for _, b := range meth.Bindings {
+ methodWithBindingsSeen = true
+ if err := handlerTemplate.Execute(w, binding{
+ Binding: b,
+ Registry: reg,
+ AllowPatchFeature: p.AllowPatchFeature,
+ }); err != nil {
+ return "", err
+ }
+
+ // Local
+ if err := localHandlerTemplate.Execute(w, binding{
+ Binding: b,
+ Registry: reg,
+ AllowPatchFeature: p.AllowPatchFeature,
+ }); err != nil {
+ return "", err
+ }
+ }
+ }
+ if methodWithBindingsSeen {
+ targetServices = append(targetServices, svc)
+ }
+ }
+ if len(targetServices) == 0 {
+ return "", errNoTargetService
+ }
+
+ assumeColonVerb := true
+ if reg != nil {
+ assumeColonVerb = !reg.GetAllowColonFinalSegments()
+ }
+ tp := trailerParams{
+ Services: targetServices,
+ UseRequestContext: p.UseRequestContext,
+ RegisterFuncSuffix: p.RegisterFuncSuffix,
+ AssumeColonVerb: assumeColonVerb,
+ }
+ // Local
+ if err := localTrailerTemplate.Execute(w, tp); err != nil {
+ return "", err
+ }
+
+ if err := trailerTemplate.Execute(w, tp); err != nil {
+ return "", err
+ }
+ return w.String(), nil
+}
+
+var (
+ headerTemplate = template.Must(template.New("header").Parse(`
+// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
+// source: {{.GetName}}
+
+/*
+Package {{.GoPkg.Name}} is a reverse proxy.
+
+It translates gRPC into RESTful JSON APIs.
+*/
+package {{.GoPkg.Name}}
+import (
+ {{range $i := .Imports}}{{if $i.Standard}}{{$i | printf "%s\n"}}{{end}}{{end}}
+
+ {{range $i := .Imports}}{{if not $i.Standard}}{{$i | printf "%s\n"}}{{end}}{{end}}
+)
+
+// Suppress "imported and not used" errors
+var _ codes.Code
+var _ io.Reader
+var _ status.Status
+var _ = runtime.String
+var _ = utilities.NewDoubleArray
+var _ = descriptor.ForMessage
+`))
+
+ handlerTemplate = template.Must(template.New("handler").Parse(`
+{{if and .Method.GetClientStreaming .Method.GetServerStreaming}}
+{{template "bidi-streaming-request-func" .}}
+{{else if .Method.GetClientStreaming}}
+{{template "client-streaming-request-func" .}}
+{{else}}
+{{template "client-rpc-request-func" .}}
+{{end}}
+`))
+
+ _ = template.Must(handlerTemplate.New("request-func-signature").Parse(strings.Replace(`
+{{if .Method.GetServerStreaming}}
+func request_{{.Method.Service.GetName}}_{{.Method.GetName}}_{{.Index}}(ctx context.Context, marshaler runtime.Marshaler, client {{.Method.Service.GetName}}Client, req *http.Request, pathParams map[string]string) ({{.Method.Service.GetName}}_{{.Method.GetName}}Client, runtime.ServerMetadata, error)
+{{else}}
+func request_{{.Method.Service.GetName}}_{{.Method.GetName}}_{{.Index}}(ctx context.Context, marshaler runtime.Marshaler, client {{.Method.Service.GetName}}Client, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error)
+{{end}}`, "\n", "", -1)))
+
+ _ = template.Must(handlerTemplate.New("client-streaming-request-func").Parse(`
+{{template "request-func-signature" .}} {
+ var metadata runtime.ServerMetadata
+ stream, err := client.{{.Method.GetName}}(ctx)
+ if err != nil {
+ grpclog.Infof("Failed to start streaming: %v", err)
+ return nil, metadata, err
+ }
+ dec := marshaler.NewDecoder(req.Body)
+ for {
+ var protoReq {{.Method.RequestType.GoType .Method.Service.File.GoPkg.Path}}
+ err = dec.Decode(&protoReq)
+ if err == io.EOF {
+ break
+ }
+ if err != nil {
+ grpclog.Infof("Failed to decode request: %v", err)
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
+ }
+ if err = stream.Send(&protoReq); err != nil {
+ if err == io.EOF {
+ break
+ }
+ grpclog.Infof("Failed to send request: %v", err)
+ return nil, metadata, err
+ }
+ }
+
+ if err := stream.CloseSend(); err != nil {
+ grpclog.Infof("Failed to terminate client stream: %v", err)
+ return nil, metadata, err
+ }
+ header, err := stream.Header()
+ if err != nil {
+ grpclog.Infof("Failed to get header from client: %v", err)
+ return nil, metadata, err
+ }
+ metadata.HeaderMD = header
+{{if .Method.GetServerStreaming}}
+ return stream, metadata, nil
+{{else}}
+ msg, err := stream.CloseAndRecv()
+ metadata.TrailerMD = stream.Trailer()
+ return msg, metadata, err
+{{end}}
+}
+`))
+
+ _ = template.Must(handlerTemplate.New("client-rpc-request-func").Parse(`
+{{$AllowPatchFeature := .AllowPatchFeature}}
+{{if .HasQueryParam}}
+var (
+ filter_{{.Method.Service.GetName}}_{{.Method.GetName}}_{{.Index}} = {{.QueryParamFilter}}
+)
+{{end}}
+{{template "request-func-signature" .}} {
+ var protoReq {{.Method.RequestType.GoType .Method.Service.File.GoPkg.Path}}
+ var metadata runtime.ServerMetadata
+{{if .Body}}
+ newReader, berr := utilities.IOReaderFactory(req.Body)
+ if berr != nil {
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
+ }
+ if err := marshaler.NewDecoder(newReader()).Decode(&{{.Body.AssignableExpr "protoReq"}}); err != nil && err != io.EOF {
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
+ }
+ {{- if and $AllowPatchFeature (eq (.HTTPMethod) "PATCH") (.FieldMaskField) (not (eq "*" .GetBodyFieldPath)) }}
+ if protoReq.{{.FieldMaskField}} == nil || len(protoReq.{{.FieldMaskField}}.GetPaths()) == 0 {
+ _, md := descriptor.ForMessage(protoReq.{{.GetBodyFieldStructName}})
+ if fieldMask, err := runtime.FieldMaskFromRequestBody(newReader(), md); err != nil {
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
+ } else {
+ protoReq.{{.FieldMaskField}} = fieldMask
+ }
+ }
+ {{end}}
+{{end}}
+{{if .PathParams}}
+ var (
+ val string
+{{- if .HasEnumPathParam}}
+ e int32
+{{- end}}
+{{- if .HasRepeatedEnumPathParam}}
+ es []int32
+{{- end}}
+ ok bool
+ err error
+ _ = err
+ )
+ {{$binding := .}}
+ {{range $param := .PathParams}}
+ {{$enum := $binding.LookupEnum $param}}
+ val, ok = pathParams[{{$param | printf "%q"}}]
+ if !ok {
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", {{$param | printf "%q"}})
+ }
+{{if $param.IsNestedProto3}}
+ err = runtime.PopulateFieldFromPath(&protoReq, {{$param | printf "%q"}}, val)
+ {{if $enum}}
+ e{{if $param.IsRepeated}}s{{end}}, err = {{$param.ConvertFuncExpr}}(val{{if $param.IsRepeated}}, {{$binding.Registry.GetRepeatedPathParamSeparator | printf "%c" | printf "%q"}}{{end}}, {{$enum.GoType $param.Target.Message.File.GoPkg.Path}}_value)
+ {{end}}
+{{else if $enum}}
+ e{{if $param.IsRepeated}}s{{end}}, err = {{$param.ConvertFuncExpr}}(val{{if $param.IsRepeated}}, {{$binding.Registry.GetRepeatedPathParamSeparator | printf "%c" | printf "%q"}}{{end}}, {{$enum.GoType $param.Target.Message.File.GoPkg.Path}}_value)
+{{else}}
+ {{$param.AssignableExpr "protoReq"}}, err = {{$param.ConvertFuncExpr}}(val{{if $param.IsRepeated}}, {{$binding.Registry.GetRepeatedPathParamSeparator | printf "%c" | printf "%q"}}{{end}})
+{{end}}
+ if err != nil {
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", {{$param | printf "%q"}}, err)
+ }
+{{if and $enum $param.IsRepeated}}
+ s := make([]{{$enum.GoType $param.Target.Message.File.GoPkg.Path}}, len(es))
+ for i, v := range es {
+ s[i] = {{$enum.GoType $param.Target.Message.File.GoPkg.Path}}(v)
+ }
+ {{$param.AssignableExpr "protoReq"}} = s
+{{else if $enum}}
+ {{$param.AssignableExpr "protoReq"}} = {{$enum.GoType $param.Target.Message.File.GoPkg.Path}}(e)
+{{end}}
+ {{end}}
+{{end}}
+{{if .HasQueryParam}}
+ if err := req.ParseForm(); err != nil {
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
+ }
+ if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_{{.Method.Service.GetName}}_{{.Method.GetName}}_{{.Index}}); err != nil {
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
+ }
+{{end}}
+{{if .Method.GetServerStreaming}}
+ stream, err := client.{{.Method.GetName}}(ctx, &protoReq)
+ if err != nil {
+ return nil, metadata, err
+ }
+ header, err := stream.Header()
+ if err != nil {
+ return nil, metadata, err
+ }
+ metadata.HeaderMD = header
+ return stream, metadata, nil
+{{else}}
+ msg, err := client.{{.Method.GetName}}(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
+ return msg, metadata, err
+{{end}}
+}`))
+
+ _ = template.Must(handlerTemplate.New("bidi-streaming-request-func").Parse(`
+{{template "request-func-signature" .}} {
+ var metadata runtime.ServerMetadata
+ stream, err := client.{{.Method.GetName}}(ctx)
+ if err != nil {
+ grpclog.Infof("Failed to start streaming: %v", err)
+ return nil, metadata, err
+ }
+ dec := marshaler.NewDecoder(req.Body)
+ handleSend := func() error {
+ var protoReq {{.Method.RequestType.GoType .Method.Service.File.GoPkg.Path}}
+ err := dec.Decode(&protoReq)
+ if err == io.EOF {
+ return err
+ }
+ if err != nil {
+ grpclog.Infof("Failed to decode request: %v", err)
+ return err
+ }
+ if err := stream.Send(&protoReq); err != nil {
+ grpclog.Infof("Failed to send request: %v", err)
+ return err
+ }
+ return nil
+ }
+ if err := handleSend(); err != nil {
+ if cerr := stream.CloseSend(); cerr != nil {
+ grpclog.Infof("Failed to terminate client stream: %v", cerr)
+ }
+ if err == io.EOF {
+ return stream, metadata, nil
+ }
+ return nil, metadata, err
+ }
+ go func() {
+ for {
+ if err := handleSend(); err != nil {
+ break
+ }
+ }
+ if err := stream.CloseSend(); err != nil {
+ grpclog.Infof("Failed to terminate client stream: %v", err)
+ }
+ }()
+ header, err := stream.Header()
+ if err != nil {
+ grpclog.Infof("Failed to get header from client: %v", err)
+ return nil, metadata, err
+ }
+ metadata.HeaderMD = header
+ return stream, metadata, nil
+}
+`))
+
+ localHandlerTemplate = template.Must(template.New("local-handler").Parse(`
+{{if and .Method.GetClientStreaming .Method.GetServerStreaming}}
+{{else if .Method.GetClientStreaming}}
+{{else if .Method.GetServerStreaming}}
+{{else}}
+{{template "local-client-rpc-request-func" .}}
+{{end}}
+`))
+
+ _ = template.Must(localHandlerTemplate.New("local-request-func-signature").Parse(strings.Replace(`
+{{if .Method.GetServerStreaming}}
+{{else}}
+func local_request_{{.Method.Service.GetName}}_{{.Method.GetName}}_{{.Index}}(ctx context.Context, marshaler runtime.Marshaler, server {{.Method.Service.GetName}}Server, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error)
+{{end}}`, "\n", "", -1)))
+
+ _ = template.Must(localHandlerTemplate.New("local-client-rpc-request-func").Parse(`
+{{$AllowPatchFeature := .AllowPatchFeature}}
+{{template "local-request-func-signature" .}} {
+ var protoReq {{.Method.RequestType.GoType .Method.Service.File.GoPkg.Path}}
+ var metadata runtime.ServerMetadata
+{{if .Body}}
+ newReader, berr := utilities.IOReaderFactory(req.Body)
+ if berr != nil {
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
+ }
+ if err := marshaler.NewDecoder(newReader()).Decode(&{{.Body.AssignableExpr "protoReq"}}); err != nil && err != io.EOF {
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
+ }
+ {{- if and $AllowPatchFeature (eq (.HTTPMethod) "PATCH") (.FieldMaskField) (not (eq "*" .GetBodyFieldPath)) }}
+ if protoReq.{{.FieldMaskField}} == nil || len(protoReq.{{.FieldMaskField}}.GetPaths()) == 0 {
+ _, md := descriptor.ForMessage(protoReq.{{.GetBodyFieldStructName}})
+ if fieldMask, err := runtime.FieldMaskFromRequestBody(newReader(), md); err != nil {
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
+ } else {
+ protoReq.{{.FieldMaskField}} = fieldMask
+ }
+ }
+ {{end}}
+{{end}}
+{{if .PathParams}}
+ var (
+ val string
+{{- if .HasEnumPathParam}}
+ e int32
+{{- end}}
+{{- if .HasRepeatedEnumPathParam}}
+ es []int32
+{{- end}}
+ ok bool
+ err error
+ _ = err
+ )
+ {{$binding := .}}
+ {{range $param := .PathParams}}
+ {{$enum := $binding.LookupEnum $param}}
+ val, ok = pathParams[{{$param | printf "%q"}}]
+ if !ok {
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", {{$param | printf "%q"}})
+ }
+{{if $param.IsNestedProto3}}
+ err = runtime.PopulateFieldFromPath(&protoReq, {{$param | printf "%q"}}, val)
+ {{if $enum}}
+ e{{if $param.IsRepeated}}s{{end}}, err = {{$param.ConvertFuncExpr}}(val{{if $param.IsRepeated}}, {{$binding.Registry.GetRepeatedPathParamSeparator | printf "%c" | printf "%q"}}{{end}}, {{$enum.GoType $param.Target.Message.File.GoPkg.Path}}_value)
+ {{end}}
+{{else if $enum}}
+ e{{if $param.IsRepeated}}s{{end}}, err = {{$param.ConvertFuncExpr}}(val{{if $param.IsRepeated}}, {{$binding.Registry.GetRepeatedPathParamSeparator | printf "%c" | printf "%q"}}{{end}}, {{$enum.GoType $param.Target.Message.File.GoPkg.Path}}_value)
+{{else}}
+ {{$param.AssignableExpr "protoReq"}}, err = {{$param.ConvertFuncExpr}}(val{{if $param.IsRepeated}}, {{$binding.Registry.GetRepeatedPathParamSeparator | printf "%c" | printf "%q"}}{{end}})
+{{end}}
+ if err != nil {
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", {{$param | printf "%q"}}, err)
+ }
+{{if and $enum $param.IsRepeated}}
+ s := make([]{{$enum.GoType $param.Target.Message.File.GoPkg.Path}}, len(es))
+ for i, v := range es {
+ s[i] = {{$enum.GoType $param.Target.Message.File.GoPkg.Path}}(v)
+ }
+ {{$param.AssignableExpr "protoReq"}} = s
+{{else if $enum}}
+ {{$param.AssignableExpr "protoReq"}} = {{$enum.GoType $param.Target.Message.File.GoPkg.Path}}(e)
+{{end}}
+ {{end}}
+{{end}}
+{{if .HasQueryParam}}
+ if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_{{.Method.Service.GetName}}_{{.Method.GetName}}_{{.Index}}); err != nil {
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
+ }
+{{end}}
+{{if .Method.GetServerStreaming}}
+ // TODO
+{{else}}
+ msg, err := server.{{.Method.GetName}}(ctx, &protoReq)
+ return msg, metadata, err
+{{end}}
+}`))
+
+ localTrailerTemplate = template.Must(template.New("local-trailer").Parse(`
+{{$UseRequestContext := .UseRequestContext}}
+{{range $svc := .Services}}
+// Register{{$svc.GetName}}{{$.RegisterFuncSuffix}}Server registers the http handlers for service {{$svc.GetName}} to "mux".
+// UnaryRPC :call {{$svc.GetName}}Server directly.
+// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
+func Register{{$svc.GetName}}{{$.RegisterFuncSuffix}}Server(ctx context.Context, mux *runtime.ServeMux, server {{$svc.GetName}}Server) error {
+ {{range $m := $svc.Methods}}
+ {{range $b := $m.Bindings}}
+ {{if or $m.GetClientStreaming $m.GetServerStreaming}}
+ mux.Handle({{$b.HTTPMethod | printf "%q"}}, pattern_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
+ err := status.Error(codes.Unimplemented, "streaming calls are not yet supported in the in-process transport")
+ _, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
+ runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
+ return
+ })
+ {{else}}
+ mux.Handle({{$b.HTTPMethod | printf "%q"}}, pattern_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
+ {{- if $UseRequestContext }}
+ ctx, cancel := context.WithCancel(req.Context())
+ {{- else -}}
+ ctx, cancel := context.WithCancel(ctx)
+ {{- end }}
+ defer cancel()
+ inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
+ rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
+ if err != nil {
+ runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
+ return
+ }
+ resp, md, err := local_request_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}(rctx, inboundMarshaler, server, req, pathParams)
+ ctx = runtime.NewServerMetadataContext(ctx, md)
+ if err != nil {
+ runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
+ return
+ }
+
+ {{ if $b.ResponseBody }}
+ forward_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}(ctx, mux, outboundMarshaler, w, req, response_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}{resp}, mux.GetForwardResponseOptions()...)
+ {{ else }}
+ forward_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
+ {{end}}
+ })
+ {{end}}
+ {{end}}
+ {{end}}
+ return nil
+}
+{{end}}`))
+
+ trailerTemplate = template.Must(template.New("trailer").Parse(`
+{{$UseRequestContext := .UseRequestContext}}
+{{range $svc := .Services}}
+// Register{{$svc.GetName}}{{$.RegisterFuncSuffix}}FromEndpoint is same as Register{{$svc.GetName}}{{$.RegisterFuncSuffix}} but
+// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
+func Register{{$svc.GetName}}{{$.RegisterFuncSuffix}}FromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
+ conn, err := grpc.Dial(endpoint, opts...)
+ if err != nil {
+ return err
+ }
+ defer func() {
+ if err != nil {
+ if cerr := conn.Close(); cerr != nil {
+ grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
+ }
+ return
+ }
+ go func() {
+ <-ctx.Done()
+ if cerr := conn.Close(); cerr != nil {
+ grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
+ }
+ }()
+ }()
+
+ return Register{{$svc.GetName}}{{$.RegisterFuncSuffix}}(ctx, mux, conn)
+}
+
+// Register{{$svc.GetName}}{{$.RegisterFuncSuffix}} registers the http handlers for service {{$svc.GetName}} to "mux".
+// The handlers forward requests to the grpc endpoint over "conn".
+func Register{{$svc.GetName}}{{$.RegisterFuncSuffix}}(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
+ return Register{{$svc.GetName}}{{$.RegisterFuncSuffix}}Client(ctx, mux, New{{$svc.GetName}}Client(conn))
+}
+
+// Register{{$svc.GetName}}{{$.RegisterFuncSuffix}}Client registers the http handlers for service {{$svc.GetName}}
+// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "{{$svc.GetName}}Client".
+// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "{{$svc.GetName}}Client"
+// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
+// "{{$svc.GetName}}Client" to call the correct interceptors.
+func Register{{$svc.GetName}}{{$.RegisterFuncSuffix}}Client(ctx context.Context, mux *runtime.ServeMux, client {{$svc.GetName}}Client) error {
+ {{range $m := $svc.Methods}}
+ {{range $b := $m.Bindings}}
+ mux.Handle({{$b.HTTPMethod | printf "%q"}}, pattern_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
+ {{- if $UseRequestContext }}
+ ctx, cancel := context.WithCancel(req.Context())
+ {{- else -}}
+ ctx, cancel := context.WithCancel(ctx)
+ {{- end }}
+ defer cancel()
+ inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
+ rctx, err := runtime.AnnotateContext(ctx, mux, req)
+ if err != nil {
+ runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
+ return
+ }
+ resp, md, err := request_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}(rctx, inboundMarshaler, client, req, pathParams)
+ ctx = runtime.NewServerMetadataContext(ctx, md)
+ if err != nil {
+ runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
+ return
+ }
+ {{if $m.GetServerStreaming}}
+ forward_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}(ctx, mux, outboundMarshaler, w, req, func() (proto.Message, error) { return resp.Recv() }, mux.GetForwardResponseOptions()...)
+ {{else}}
+ {{ if $b.ResponseBody }}
+ forward_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}(ctx, mux, outboundMarshaler, w, req, response_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}{resp}, mux.GetForwardResponseOptions()...)
+ {{ else }}
+ forward_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
+ {{end}}
+ {{end}}
+ })
+ {{end}}
+ {{end}}
+ return nil
+}
+
+{{range $m := $svc.Methods}}
+{{range $b := $m.Bindings}}
+{{if $b.ResponseBody}}
+type response_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}} struct {
+ proto.Message
+}
+
+func (m response_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}) XXX_ResponseBody() interface{} {
+ response := m.Message.(*{{$m.ResponseType.GoType $m.Service.File.GoPkg.Path}})
+ return {{$b.ResponseBody.AssignableExpr "response"}}
+}
+{{end}}
+{{end}}
+{{end}}
+
+var (
+ {{range $m := $svc.Methods}}
+ {{range $b := $m.Bindings}}
+ pattern_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}} = runtime.MustPattern(runtime.NewPattern({{$b.PathTmpl.Version}}, {{$b.PathTmpl.OpCodes | printf "%#v"}}, {{$b.PathTmpl.Pool | printf "%#v"}}, {{$b.PathTmpl.Verb | printf "%q"}}, runtime.AssumeColonVerbOpt({{$.AssumeColonVerb}})))
+ {{end}}
+ {{end}}
+)
+
+var (
+ {{range $m := $svc.Methods}}
+ {{range $b := $m.Bindings}}
+ forward_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}} = {{if $m.GetServerStreaming}}runtime.ForwardResponseStream{{else}}runtime.ForwardResponseMessage{{end}}
+ {{end}}
+ {{end}}
+)
+{{end}}`))
+)
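
For context on the trailer templates above: every service with bindings gets a Register<Service><Suffix>FromEndpoint / Register<Service><Suffix> / Register<Service><Suffix>Client trio in the generated file. Below is a minimal usage sketch, assuming a hypothetical Echo service and the default register_func_suffix of "Handler"; in real code the generated functions live in the proto's Go package rather than in main.

package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"google.golang.org/grpc"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	mux := runtime.NewServeMux()
	opts := []grpc.DialOption{grpc.WithInsecure()}

	// RegisterEchoHandlerFromEndpoint is the generated entry point for the
	// hypothetical Echo service; as emitted by trailerTemplate, it dials the
	// gRPC endpoint and closes the connection once ctx is done.
	if err := RegisterEchoHandlerFromEndpoint(ctx, mux, "localhost:9090", opts); err != nil {
		log.Fatal(err)
	}
	log.Fatal(http.ListenAndServe(":8080", mux))
}
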
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/BUILD.bazel
new file mode 100644
index 0000000..89f94a1
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/BUILD.bazel
@@ -0,0 +1,32 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
+
+package(default_visibility = ["//:generators"])
+
+go_library(
+ name = "go_default_library",
+ srcs = [
+ "compile.go",
+ "parse.go",
+ "types.go",
+ ],
+ importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule",
+ deps = [
+ "//utilities:go_default_library",
+ "@com_github_golang_glog//:go_default_library",
+ ],
+)
+
+go_test(
+ name = "go_default_test",
+ size = "small",
+ srcs = [
+ "compile_test.go",
+ "parse_test.go",
+ "types_test.go",
+ ],
+ embed = [":go_default_library"],
+ deps = [
+ "//utilities:go_default_library",
+ "@com_github_golang_glog//:go_default_library",
+ ],
+)
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/compile.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/compile.go
new file mode 100644
index 0000000..437039a
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/compile.go
@@ -0,0 +1,117 @@
+package httprule
+
+import (
+ "github.com/grpc-ecosystem/grpc-gateway/utilities"
+)
+
+const (
+ opcodeVersion = 1
+)
+
+// Template is a compiled representation of path templates.
+type Template struct {
+ // Version is the version number of the format.
+ Version int
+ // OpCodes is a sequence of operations.
+ OpCodes []int
+ // Pool is a constant pool
+ Pool []string
+ // Verb is a VERB part in the template.
+ Verb string
+ // Fields is a list of field paths bound in this template.
+ Fields []string
+ // Original template (example: /v1/a_bit_of_everything)
+ Template string
+}
+
+// Compiler compiles the utilities representation of path templates into marshallable operations.
+// They can be unmarshalled by runtime.NewPattern.
+type Compiler interface {
+ Compile() Template
+}
+
+type op struct {
+ // code is the opcode of the operation
+ code utilities.OpCode
+
+ // str is a string operand of the code.
+ // num is ignored if str is not empty.
+ str string
+
+ // num is a numeric operand of the code.
+ num int
+}
+
+func (w wildcard) compile() []op {
+ return []op{
+ {code: utilities.OpPush},
+ }
+}
+
+func (w deepWildcard) compile() []op {
+ return []op{
+ {code: utilities.OpPushM},
+ }
+}
+
+func (l literal) compile() []op {
+ return []op{
+ {
+ code: utilities.OpLitPush,
+ str: string(l),
+ },
+ }
+}
+
+func (v variable) compile() []op {
+ var ops []op
+ for _, s := range v.segments {
+ ops = append(ops, s.compile()...)
+ }
+ ops = append(ops, op{
+ code: utilities.OpConcatN,
+ num: len(v.segments),
+ }, op{
+ code: utilities.OpCapture,
+ str: v.path,
+ })
+
+ return ops
+}
+
+func (t template) Compile() Template {
+ var rawOps []op
+ for _, s := range t.segments {
+ rawOps = append(rawOps, s.compile()...)
+ }
+
+ var (
+ ops []int
+ pool []string
+ fields []string
+ )
+ consts := make(map[string]int)
+ for _, op := range rawOps {
+ ops = append(ops, int(op.code))
+ if op.str == "" {
+ ops = append(ops, op.num)
+ } else {
+ if _, ok := consts[op.str]; !ok {
+ consts[op.str] = len(pool)
+ pool = append(pool, op.str)
+ }
+ ops = append(ops, consts[op.str])
+ }
+ if op.code == utilities.OpCapture {
+ fields = append(fields, op.str)
+ }
+ }
+ return Template{
+ Version: opcodeVersion,
+ OpCodes: ops,
+ Pool: pool,
+ Verb: t.verb,
+ Fields: fields,
+ Template: t.template,
+ }
+}
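
A small sketch of what Compile produces for a concrete template; Parse is defined in parse.go below, and the template string here is arbitrary.

package main

import (
	"fmt"

	"github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule"
)

func main() {
	c, err := httprule.Parse("/v1/{name=messages/*}:cancel")
	if err != nil {
		panic(err)
	}
	tmpl := c.Compile()
	// OpCodes reference the constant pool by index; the bound field path and
	// the trailing verb are carried separately for runtime.NewPattern.
	fmt.Println(tmpl.Pool)    // [v1 messages name]
	fmt.Println(tmpl.Fields)  // [name]
	fmt.Println(tmpl.Verb)    // cancel
	fmt.Println(tmpl.OpCodes) // opcode/operand pairs: OpLitPush 0, OpLitPush 1, OpPush, OpConcatN 2, OpCapture 2
}
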
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/fuzz.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/fuzz.go
new file mode 100644
index 0000000..138f7c1
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/fuzz.go
@@ -0,0 +1,11 @@
+// +build gofuzz
+
+package httprule
+
+func Fuzz(data []byte) int {
+ _, err := Parse(string(data))
+ if err != nil {
+ return 0
+ }
+ return 0
+}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/parse.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/parse.go
new file mode 100644
index 0000000..f933cd8
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/parse.go
@@ -0,0 +1,351 @@
+package httprule
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/golang/glog"
+)
+
+// InvalidTemplateError indicates that the path template is not valid.
+type InvalidTemplateError struct {
+ tmpl string
+ msg string
+}
+
+func (e InvalidTemplateError) Error() string {
+ return fmt.Sprintf("%s: %s", e.msg, e.tmpl)
+}
+
+// Parse parses the string representation of a path template.
+func Parse(tmpl string) (Compiler, error) {
+ if !strings.HasPrefix(tmpl, "/") {
+ return template{}, InvalidTemplateError{tmpl: tmpl, msg: "no leading /"}
+ }
+ tokens, verb := tokenize(tmpl[1:])
+
+ p := parser{tokens: tokens}
+ segs, err := p.topLevelSegments()
+ if err != nil {
+ return template{}, InvalidTemplateError{tmpl: tmpl, msg: err.Error()}
+ }
+
+ return template{
+ segments: segs,
+ verb: verb,
+ template: tmpl,
+ }, nil
+}
+
+func tokenize(path string) (tokens []string, verb string) {
+ if path == "" {
+ return []string{eof}, ""
+ }
+
+ const (
+ init = iota
+ field
+ nested
+ )
+ var (
+ st = init
+ )
+ for path != "" {
+ var idx int
+ switch st {
+ case init:
+ idx = strings.IndexAny(path, "/{")
+ case field:
+ idx = strings.IndexAny(path, ".=}")
+ case nested:
+ idx = strings.IndexAny(path, "/}")
+ }
+ if idx < 0 {
+ tokens = append(tokens, path)
+ break
+ }
+ switch r := path[idx]; r {
+ case '/', '.':
+ case '{':
+ st = field
+ case '=':
+ st = nested
+ case '}':
+ st = init
+ }
+ if idx == 0 {
+ tokens = append(tokens, path[idx:idx+1])
+ } else {
+ tokens = append(tokens, path[:idx], path[idx:idx+1])
+ }
+ path = path[idx+1:]
+ }
+
+ l := len(tokens)
+ t := tokens[l-1]
+ if idx := strings.LastIndex(t, ":"); idx == 0 {
+ tokens, verb = tokens[:l-1], t[1:]
+ } else if idx > 0 {
+ tokens[l-1], verb = t[:idx], t[idx+1:]
+ }
+ tokens = append(tokens, eof)
+ return tokens, verb
+}
+
+// parser is a parser of the template syntax defined in github.com/googleapis/googleapis/google/api/http.proto.
+type parser struct {
+ tokens []string
+ accepted []string
+}
+
+// topLevelSegments is the target of this parser.
+func (p *parser) topLevelSegments() ([]segment, error) {
+ glog.V(1).Infof("Parsing %q", p.tokens)
+ segs, err := p.segments()
+ if err != nil {
+ return nil, err
+ }
+ glog.V(2).Infof("accept segments: %q; %q", p.accepted, p.tokens)
+ if _, err := p.accept(typeEOF); err != nil {
+ return nil, fmt.Errorf("unexpected token %q after segments %q", p.tokens[0], strings.Join(p.accepted, ""))
+ }
+ glog.V(2).Infof("accept eof: %q; %q", p.accepted, p.tokens)
+ return segs, nil
+}
+
+func (p *parser) segments() ([]segment, error) {
+ s, err := p.segment()
+ if err != nil {
+ return nil, err
+ }
+ glog.V(2).Infof("accept segment: %q; %q", p.accepted, p.tokens)
+
+ segs := []segment{s}
+ for {
+ if _, err := p.accept("/"); err != nil {
+ return segs, nil
+ }
+ s, err := p.segment()
+ if err != nil {
+ return segs, err
+ }
+ segs = append(segs, s)
+ glog.V(2).Infof("accept segment: %q; %q", p.accepted, p.tokens)
+ }
+}
+
+func (p *parser) segment() (segment, error) {
+ if _, err := p.accept("*"); err == nil {
+ return wildcard{}, nil
+ }
+ if _, err := p.accept("**"); err == nil {
+ return deepWildcard{}, nil
+ }
+ if l, err := p.literal(); err == nil {
+ return l, nil
+ }
+
+ v, err := p.variable()
+ if err != nil {
+ return nil, fmt.Errorf("segment neither wildcards, literal or variable: %v", err)
+ }
+ return v, err
+}
+
+func (p *parser) literal() (segment, error) {
+ lit, err := p.accept(typeLiteral)
+ if err != nil {
+ return nil, err
+ }
+ return literal(lit), nil
+}
+
+func (p *parser) variable() (segment, error) {
+ if _, err := p.accept("{"); err != nil {
+ return nil, err
+ }
+
+ path, err := p.fieldPath()
+ if err != nil {
+ return nil, err
+ }
+
+ var segs []segment
+ if _, err := p.accept("="); err == nil {
+ segs, err = p.segments()
+ if err != nil {
+ return nil, fmt.Errorf("invalid segment in variable %q: %v", path, err)
+ }
+ } else {
+ segs = []segment{wildcard{}}
+ }
+
+ if _, err := p.accept("}"); err != nil {
+ return nil, fmt.Errorf("unterminated variable segment: %s", path)
+ }
+ return variable{
+ path: path,
+ segments: segs,
+ }, nil
+}
+
+func (p *parser) fieldPath() (string, error) {
+ c, err := p.accept(typeIdent)
+ if err != nil {
+ return "", err
+ }
+ components := []string{c}
+ for {
+ if _, err = p.accept("."); err != nil {
+ return strings.Join(components, "."), nil
+ }
+ c, err := p.accept(typeIdent)
+ if err != nil {
+ return "", fmt.Errorf("invalid field path component: %v", err)
+ }
+ components = append(components, c)
+ }
+}
+
+// A termType is a type of terminal symbols.
+type termType string
+
+// These constants define some of the valid values of termType.
+// They improve the readability of the parse functions.
+//
+// You can also use "/", "*", "**", "." or "=" as valid values.
+const (
+ typeIdent = termType("ident")
+ typeLiteral = termType("literal")
+ typeEOF = termType("$")
+)
+
+const (
+ // eof is the terminal symbol which always appears at the end of token sequence.
+ eof = "\u0000"
+)
+
+// accept tries to accept a token in "p".
+// This function consumes a token and returns it if it matches the specified "term".
+// If it doesn't match, the function does not consume any tokens and returns an error.
+func (p *parser) accept(term termType) (string, error) {
+ t := p.tokens[0]
+ switch term {
+ case "/", "*", "**", ".", "=", "{", "}":
+ if t != string(term) && t != "/" {
+ return "", fmt.Errorf("expected %q but got %q", term, t)
+ }
+ case typeEOF:
+ if t != eof {
+ return "", fmt.Errorf("expected EOF but got %q", t)
+ }
+ case typeIdent:
+ if err := expectIdent(t); err != nil {
+ return "", err
+ }
+ case typeLiteral:
+ if err := expectPChars(t); err != nil {
+ return "", err
+ }
+ default:
+ return "", fmt.Errorf("unknown termType %q", term)
+ }
+ p.tokens = p.tokens[1:]
+ p.accepted = append(p.accepted, t)
+ return t, nil
+}
+
+// expectPChars determines if "t" consists of only pchars defined in RFC3986.
+//
+// https://www.ietf.org/rfc/rfc3986.txt, P.49
+// pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
+// unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
+// sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
+// / "*" / "+" / "," / ";" / "="
+// pct-encoded = "%" HEXDIG HEXDIG
+func expectPChars(t string) error {
+ const (
+ init = iota
+ pct1
+ pct2
+ )
+ st := init
+ for _, r := range t {
+ if st != init {
+ if !isHexDigit(r) {
+ return fmt.Errorf("invalid hexdigit: %c(%U)", r, r)
+ }
+ switch st {
+ case pct1:
+ st = pct2
+ case pct2:
+ st = init
+ }
+ continue
+ }
+
+ // unreserved
+ switch {
+ case 'A' <= r && r <= 'Z':
+ continue
+ case 'a' <= r && r <= 'z':
+ continue
+ case '0' <= r && r <= '9':
+ continue
+ }
+ switch r {
+ case '-', '.', '_', '~':
+ // unreserved
+ case '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=':
+ // sub-delims
+ case ':', '@':
+ // rest of pchar
+ case '%':
+ // pct-encoded
+ st = pct1
+ default:
+ return fmt.Errorf("invalid character in path segment: %q(%U)", r, r)
+ }
+ }
+ if st != init {
+ return fmt.Errorf("invalid percent-encoding in %q", t)
+ }
+ return nil
+}
+
+// expectIdent determines if "ident" is a valid identifier in .proto schema ([[:alpha:]_][[:alphanum:]_]*).
+func expectIdent(ident string) error {
+ if ident == "" {
+ return fmt.Errorf("empty identifier")
+ }
+ for pos, r := range ident {
+ switch {
+ case '0' <= r && r <= '9':
+ if pos == 0 {
+ return fmt.Errorf("identifier starting with digit: %s", ident)
+ }
+ continue
+ case 'A' <= r && r <= 'Z':
+ continue
+ case 'a' <= r && r <= 'z':
+ continue
+ case r == '_':
+ continue
+ default:
+ return fmt.Errorf("invalid character %q(%U) in identifier: %s", r, r, ident)
+ }
+ }
+ return nil
+}
+
+func isHexDigit(r rune) bool {
+ switch {
+ case '0' <= r && r <= '9':
+ return true
+ case 'A' <= r && r <= 'F':
+ return true
+ case 'a' <= r && r <= 'f':
+ return true
+ }
+ return false
+}
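
A few hedged examples of what this grammar accepts and rejects; the template strings below are arbitrary.

package main

import (
	"fmt"

	"github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule"
)

func main() {
	templates := []string{
		"v1/messages",         // rejected: no leading "/"
		"/v1/{msg.id}/detail", // accepted: nested field path "msg.id" with an implicit "*" binding
		"/v1/{name=books/*}",  // accepted: "name" captures the "books/*" sub-path
		"/v1/{name=**}:watch", // accepted: deep wildcard plus verb "watch"
	}
	for _, tmpl := range templates {
		_, err := httprule.Parse(tmpl)
		fmt.Printf("%-22s err=%v\n", tmpl, err)
	}
}
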
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/types.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/types.go
new file mode 100644
index 0000000..5a814a0
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/types.go
@@ -0,0 +1,60 @@
+package httprule
+
+import (
+ "fmt"
+ "strings"
+)
+
+type template struct {
+ segments []segment
+ verb string
+ template string
+}
+
+type segment interface {
+ fmt.Stringer
+ compile() (ops []op)
+}
+
+type wildcard struct{}
+
+type deepWildcard struct{}
+
+type literal string
+
+type variable struct {
+ path string
+ segments []segment
+}
+
+func (wildcard) String() string {
+ return "*"
+}
+
+func (deepWildcard) String() string {
+ return "**"
+}
+
+func (l literal) String() string {
+ return string(l)
+}
+
+func (v variable) String() string {
+ var segs []string
+ for _, s := range v.segments {
+ segs = append(segs, s.String())
+ }
+ return fmt.Sprintf("{%s=%s}", v.path, strings.Join(segs, "/"))
+}
+
+func (t template) String() string {
+ var segs []string
+ for _, s := range t.segments {
+ segs = append(segs, s.String())
+ }
+ str := strings.Join(segs, "/")
+ if t.verb != "" {
+ str = fmt.Sprintf("%s:%s", str, t.verb)
+ }
+ return "/" + str
+}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/main.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/main.go
new file mode 100644
index 0000000..291ba7d
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/main.go
@@ -0,0 +1,141 @@
+// Command protoc-gen-grpc-gateway is a plugin for the Google protocol buffer
+// compiler that generates a reverse proxy, which converts incoming RESTful
+// HTTP/1 requests into gRPC invocations.
+// You rarely need to run this program directly. Instead, put this program
+// into your $PATH with the name "protoc-gen-grpc-gateway" and run
+// protoc --grpc-gateway_out=output_directory path/to/input.proto
+//
+// See README.md for more details.
+package main
+
+import (
+ "flag"
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/golang/glog"
+ "github.com/golang/protobuf/proto"
+ plugin "github.com/golang/protobuf/protoc-gen-go/plugin"
+ "github.com/grpc-ecosystem/grpc-gateway/codegenerator"
+ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
+ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway"
+)
+
+var (
+ importPrefix = flag.String("import_prefix", "", "prefix to be added to go package paths for imported proto files")
+ importPath = flag.String("import_path", "", "used as the package if no input files declare go_package. If it contains slashes, everything up to the rightmost slash is ignored.")
+ registerFuncSuffix = flag.String("register_func_suffix", "Handler", "used to construct names of generated Register*<Suffix> methods.")
+ useRequestContext = flag.Bool("request_context", true, "determine whether to use http.Request's context or not")
+ allowDeleteBody = flag.Bool("allow_delete_body", false, "unless set, HTTP DELETE methods may not have a body")
+ grpcAPIConfiguration = flag.String("grpc_api_configuration", "", "path to gRPC API Configuration in YAML format")
+ pathType = flag.String("paths", "", "specifies how the paths of generated files are structured")
+ allowRepeatedFieldsInBody = flag.Bool("allow_repeated_fields_in_body", false, "allows the use of repeated fields in `body` and `response_body` fields of the `google.api.http` annotation option")
+ repeatedPathParamSeparator = flag.String("repeated_path_param_separator", "csv", "configures how repeated fields should be split. Allowed values are `csv`, `pipes`, `ssv` and `tsv`.")
+ allowPatchFeature = flag.Bool("allow_patch_feature", true, "determines whether to use PATCH feature involving update masks (using google.protobuf.FieldMask).")
+ allowColonFinalSegments = flag.Bool("allow_colon_final_segments", false, "determines whether colons are permitted in the final segment of a path")
+ versionFlag = flag.Bool("version", false, "print the current version")
+)
+
+// Variables set by goreleaser at build time
+var (
+ version = "dev"
+ commit = "unknown"
+ date = "unknown"
+)
+
+func main() {
+ flag.Parse()
+ defer glog.Flush()
+
+ if *versionFlag {
+ fmt.Printf("Version %v, commit %v, built at %v\n", version, commit, date)
+ os.Exit(0)
+ }
+
+ reg := descriptor.NewRegistry()
+
+ glog.V(1).Info("Parsing code generator request")
+ req, err := codegenerator.ParseRequest(os.Stdin)
+ if err != nil {
+ glog.Fatal(err)
+ }
+ glog.V(1).Info("Parsed code generator request")
+ if req.Parameter != nil {
+ for _, p := range strings.Split(req.GetParameter(), ",") {
+ spec := strings.SplitN(p, "=", 2)
+ if len(spec) == 1 {
+ if err := flag.CommandLine.Set(spec[0], ""); err != nil {
+ glog.Fatalf("Cannot set flag %s", p)
+ }
+ continue
+ }
+ name, value := spec[0], spec[1]
+ if strings.HasPrefix(name, "M") {
+ reg.AddPkgMap(name[1:], value)
+ continue
+ }
+ if err := flag.CommandLine.Set(name, value); err != nil {
+ glog.Fatalf("Cannot set flag %s", p)
+ }
+ }
+ }
+
+ g := gengateway.New(reg, *useRequestContext, *registerFuncSuffix, *pathType, *allowPatchFeature)
+
+ if *grpcAPIConfiguration != "" {
+ if err := reg.LoadGrpcAPIServiceFromYAML(*grpcAPIConfiguration); err != nil {
+ emitError(err)
+ return
+ }
+ }
+
+ reg.SetPrefix(*importPrefix)
+ reg.SetImportPath(*importPath)
+ reg.SetAllowDeleteBody(*allowDeleteBody)
+ reg.SetAllowRepeatedFieldsInBody(*allowRepeatedFieldsInBody)
+ reg.SetAllowColonFinalSegments(*allowColonFinalSegments)
+ if err := reg.SetRepeatedPathParamSeparator(*repeatedPathParamSeparator); err != nil {
+ emitError(err)
+ return
+ }
+ if err := reg.Load(req); err != nil {
+ emitError(err)
+ return
+ }
+
+ var targets []*descriptor.File
+ for _, target := range req.FileToGenerate {
+ f, err := reg.LookupFile(target)
+ if err != nil {
+ glog.Fatal(err)
+ }
+ targets = append(targets, f)
+ }
+
+ out, err := g.Generate(targets)
+ glog.V(1).Info("Processed code generator request")
+ if err != nil {
+ emitError(err)
+ return
+ }
+ emitFiles(out)
+}
+
+func emitFiles(out []*plugin.CodeGeneratorResponse_File) {
+ emitResp(&plugin.CodeGeneratorResponse{File: out})
+}
+
+func emitError(err error) {
+ emitResp(&plugin.CodeGeneratorResponse{Error: proto.String(err.Error())})
+}
+
+func emitResp(resp *plugin.CodeGeneratorResponse) {
+ buf, err := proto.Marshal(resp)
+ if err != nil {
+ glog.Fatal(err)
+ }
+ if _, err := os.Stdout.Write(buf); err != nil {
+ glog.Fatal(err)
+ }
+}
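
The options defined in main.go are passed through protoc's comma-separated plugin parameter and routed to the matching flags by the loop in main; entries of the form M<proto file>=<go package> are instead added to the registry's package map. A hypothetical invocation (the input proto name is made up) could look like:

	protoc -I. --grpc-gateway_out=logtostderr=true,paths=source_relative,Mgoogle/api/annotations.proto=google.golang.org/genproto/googleapis/api/annotations:. echo.proto
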