Import of https://github.com/ciena/voltctl at commit 40d61fbf3f910ed4017cf67c9c79e8e1f82a33a5

Change-Id: I8464c59e60d76cb8612891db3303878975b5416c
diff --git a/vendor/github.com/jhump/protoreflect/desc/convert.go b/vendor/github.com/jhump/protoreflect/desc/convert.go
new file mode 100644
index 0000000..6573c90
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/convert.go
@@ -0,0 +1,216 @@
+package desc
+
+import (
+	"errors"
+	"fmt"
+	"strings"
+
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+
+	"github.com/jhump/protoreflect/desc/internal"
+	intn "github.com/jhump/protoreflect/internal"
+)
+
+// CreateFileDescriptor instantiates a new file descriptor for the given descriptor proto.
+// The file's direct dependencies must be provided. If the given dependencies do not include
+// all of the file's dependencies or if the contents of the descriptors are internally
+// inconsistent (e.g. contain unresolvable symbols) then an error is returned.
+func CreateFileDescriptor(fd *dpb.FileDescriptorProto, deps ...*FileDescriptor) (*FileDescriptor, error) {
+	return createFileDescriptor(fd, deps, nil)
+}
+
+func createFileDescriptor(fd *dpb.FileDescriptorProto, deps []*FileDescriptor, r *ImportResolver) (*FileDescriptor, error) {
+	ret := &FileDescriptor{
+		proto:      fd,
+		symbols:    map[string]Descriptor{},
+		fieldIndex: map[string]map[int32]*FieldDescriptor{},
+	}
+	pkg := fd.GetPackage()
+
+	// populate references to file descriptor dependencies
+	files := map[string]*FileDescriptor{}
+	for _, f := range deps {
+		files[f.proto.GetName()] = f
+	}
+	ret.deps = make([]*FileDescriptor, len(fd.GetDependency()))
+	for i, d := range fd.GetDependency() {
+		resolved := r.ResolveImport(fd.GetName(), d)
+		ret.deps[i] = files[resolved]
+		if ret.deps[i] == nil {
+			if resolved != d {
+				ret.deps[i] = files[d]
+			}
+			if ret.deps[i] == nil {
+				return nil, intn.ErrNoSuchFile(d)
+			}
+		}
+	}
+	ret.publicDeps = make([]*FileDescriptor, len(fd.GetPublicDependency()))
+	for i, pd := range fd.GetPublicDependency() {
+		ret.publicDeps[i] = ret.deps[pd]
+	}
+	ret.weakDeps = make([]*FileDescriptor, len(fd.GetWeakDependency()))
+	for i, wd := range fd.GetWeakDependency() {
+		ret.weakDeps[i] = ret.deps[wd]
+	}
+	ret.isProto3 = fd.GetSyntax() == "proto3"
+
+	// populate all tables of child descriptors
+	for _, m := range fd.GetMessageType() {
+		md, n := createMessageDescriptor(ret, ret, pkg, m, ret.symbols)
+		ret.symbols[n] = md
+		ret.messages = append(ret.messages, md)
+	}
+	for _, e := range fd.GetEnumType() {
+		ed, n := createEnumDescriptor(ret, ret, pkg, e, ret.symbols)
+		ret.symbols[n] = ed
+		ret.enums = append(ret.enums, ed)
+	}
+	for _, ex := range fd.GetExtension() {
+		exd, n := createFieldDescriptor(ret, ret, pkg, ex)
+		ret.symbols[n] = exd
+		ret.extensions = append(ret.extensions, exd)
+	}
+	for _, s := range fd.GetService() {
+		sd, n := createServiceDescriptor(ret, pkg, s, ret.symbols)
+		ret.symbols[n] = sd
+		ret.services = append(ret.services, sd)
+	}
+
+	ret.sourceInfo = internal.CreateSourceInfoMap(fd)
+	ret.sourceInfoRecomputeFunc = ret.recomputeSourceInfo
+
+	// now we can resolve all type references and source code info
+	scopes := []scope{fileScope(ret)}
+	path := make([]int32, 1, 8)
+	path[0] = internal.File_messagesTag
+	for i, md := range ret.messages {
+		if err := md.resolve(append(path, int32(i)), scopes); err != nil {
+			return nil, err
+		}
+	}
+	path[0] = internal.File_enumsTag
+	for i, ed := range ret.enums {
+		ed.resolve(append(path, int32(i)))
+	}
+	path[0] = internal.File_extensionsTag
+	for i, exd := range ret.extensions {
+		if err := exd.resolve(append(path, int32(i)), scopes); err != nil {
+			return nil, err
+		}
+	}
+	path[0] = internal.File_servicesTag
+	for i, sd := range ret.services {
+		if err := sd.resolve(append(path, int32(i)), scopes); err != nil {
+			return nil, err
+		}
+	}
+
+	return ret, nil
+}
+
+// CreateFileDescriptors constructs a set of descriptors, one for each of the
+// given descriptor protos. The given set of descriptor protos must include all
+// transitive dependencies for every file.
+func CreateFileDescriptors(fds []*dpb.FileDescriptorProto) (map[string]*FileDescriptor, error) {
+	return createFileDescriptors(fds, nil)
+}
+
+func createFileDescriptors(fds []*dpb.FileDescriptorProto, r *ImportResolver) (map[string]*FileDescriptor, error) {
+	if len(fds) == 0 {
+		return nil, nil
+	}
+	files := map[string]*dpb.FileDescriptorProto{}
+	resolved := map[string]*FileDescriptor{}
+	var name string
+	for _, fd := range fds {
+		name = fd.GetName()
+		files[name] = fd
+	}
+	for _, fd := range fds {
+		_, err := createFromSet(fd.GetName(), r, nil, files, resolved)
+		if err != nil {
+			return nil, err
+		}
+	}
+	return resolved, nil
+}
+
+// ToFileDescriptorSet creates a FileDescriptorSet proto that contains all of the given
+// file descriptors and their transitive dependencies. The files are topologically sorted
+// so that a file will always appear after its dependencies.
+func ToFileDescriptorSet(fds ...*FileDescriptor) *dpb.FileDescriptorSet {
+	var fdps []*dpb.FileDescriptorProto
+	addAllFiles(fds, &fdps, map[string]struct{}{})
+	return &dpb.FileDescriptorSet{File: fdps}
+}
+
+func addAllFiles(src []*FileDescriptor, results *[]*dpb.FileDescriptorProto, seen map[string]struct{}) {
+	for _, fd := range src {
+		if _, ok := seen[fd.GetName()]; ok {
+			continue
+		}
+		seen[fd.GetName()] = struct{}{}
+		addAllFiles(fd.GetDependencies(), results, seen)
+		*results = append(*results, fd.AsFileDescriptorProto())
+	}
+}
+
+// CreateFileDescriptorFromSet creates a descriptor from the given file descriptor set. The
+// set's *last* file will be the returned descriptor. The set's remaining files must comprise
+// the full set of transitive dependencies of that last file. This is the same format and
+// order used by protoc when emitting a FileDescriptorSet file with an invocation like so:
+//    protoc --descriptor_set_out=./test.protoset --include_imports -I. test.proto
+func CreateFileDescriptorFromSet(fds *dpb.FileDescriptorSet) (*FileDescriptor, error) {
+	return createFileDescriptorFromSet(fds, nil)
+}
+
+func createFileDescriptorFromSet(fds *dpb.FileDescriptorSet, r *ImportResolver) (*FileDescriptor, error) {
+	files := fds.GetFile()
+	if len(files) == 0 {
+		return nil, errors.New("file descriptor set is empty")
+	}
+	resolved, err := createFileDescriptors(files, r)
+	if err != nil {
+		return nil, err
+	}
+	lastFilename := files[len(files)-1].GetName()
+	return resolved[lastFilename], nil
+}
+
+// createFromSet creates a descriptor for the given filename. It recursively
+// creates descriptors for the given file's dependencies.
+func createFromSet(filename string, r *ImportResolver, seen []string, files map[string]*dpb.FileDescriptorProto, resolved map[string]*FileDescriptor) (*FileDescriptor, error) {
+	for _, s := range seen {
+		if filename == s {
+			return nil, fmt.Errorf("cycle in imports: %s", strings.Join(append(seen, filename), " -> "))
+		}
+	}
+	seen = append(seen, filename)
+
+	if d, ok := resolved[filename]; ok {
+		return d, nil
+	}
+	fdp := files[filename]
+	if fdp == nil {
+		return nil, intn.ErrNoSuchFile(filename)
+	}
+	deps := make([]*FileDescriptor, len(fdp.GetDependency()))
+	for i, depName := range fdp.GetDependency() {
+		resolvedDep := r.ResolveImport(filename, depName)
+		dep, err := createFromSet(resolvedDep, r, seen, files, resolved)
+		if _, ok := err.(intn.ErrNoSuchFile); ok && resolvedDep != depName {
+			dep, err = createFromSet(depName, r, seen, files, resolved)
+		}
+		if err != nil {
+			return nil, err
+		}
+		deps[i] = dep
+	}
+	d, err := createFileDescriptor(fdp, deps, r)
+	if err != nil {
+		return nil, err
+	}
+	resolved[filename] = d
+	return d, nil
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/descriptor.go b/vendor/github.com/jhump/protoreflect/desc/descriptor.go
new file mode 100644
index 0000000..ab235a3
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/descriptor.go
@@ -0,0 +1,1666 @@
+package desc
+
+import (
+	"bytes"
+	"fmt"
+	"sort"
+	"strconv"
+	"strings"
+	"unicode/utf8"
+
+	"github.com/golang/protobuf/proto"
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+
+	"github.com/jhump/protoreflect/desc/internal"
+)
+
+// Descriptor is the common interface implemented by all descriptor objects.
+type Descriptor interface {
+	// GetName returns the name of the object described by the descriptor. This will
+	// be a base name that does not include enclosing message names or the package name.
+	// For file descriptors, this indicates the path and name to the described file.
+	GetName() string
+	// GetFullyQualifiedName returns the fully-qualified name of the object described by
+	// the descriptor. This will include the package name and any enclosing message names.
+	// For file descriptors, this returns the path and name to the described file (same as
+	// GetName).
+	GetFullyQualifiedName() string
+	// GetParent returns the enclosing element in a proto source file. If the described
+	// object is a top-level object, this returns the file descriptor. Otherwise, it returns
+	// the element in which the described object was declared. File descriptors have no
+	// parent and return nil.
+	GetParent() Descriptor
+	// GetFile returns the file descriptor in which this element was declared. File
+	// descriptors return themselves.
+	GetFile() *FileDescriptor
+	// GetOptions returns the options proto containing options for the described element.
+	GetOptions() proto.Message
+	// GetSourceInfo returns any source code information that was present in the file
+	// descriptor. Source code info is optional. If no source code info is available for
+	// the element (including if there is none at all in the file descriptor) then this
+	// returns nil
+	GetSourceInfo() *dpb.SourceCodeInfo_Location
+	// AsProto returns the underlying descriptor proto for this descriptor.
+	AsProto() proto.Message
+}
+
+type sourceInfoRecomputeFunc = internal.SourceInfoComputeFunc
+
+// FileDescriptor describes a proto source file.
+type FileDescriptor struct {
+	proto      *dpb.FileDescriptorProto
+	symbols    map[string]Descriptor
+	deps       []*FileDescriptor
+	publicDeps []*FileDescriptor
+	weakDeps   []*FileDescriptor
+	messages   []*MessageDescriptor
+	enums      []*EnumDescriptor
+	extensions []*FieldDescriptor
+	services   []*ServiceDescriptor
+	fieldIndex map[string]map[int32]*FieldDescriptor
+	isProto3   bool
+	sourceInfo internal.SourceInfoMap
+	sourceInfoRecomputeFunc
+}
+
+func (fd *FileDescriptor) recomputeSourceInfo() {
+	internal.PopulateSourceInfoMap(fd.proto, fd.sourceInfo)
+}
+
+func (fd *FileDescriptor) registerField(field *FieldDescriptor) {
+	fields := fd.fieldIndex[field.owner.GetFullyQualifiedName()]
+	if fields == nil {
+		fields = map[int32]*FieldDescriptor{}
+		fd.fieldIndex[field.owner.GetFullyQualifiedName()] = fields
+	}
+	fields[field.GetNumber()] = field
+}
+
+// GetName returns the name of the file, as it was given to the protoc invocation
+// to compile it, possibly including path (relative to a directory in the proto
+// import path).
+func (fd *FileDescriptor) GetName() string {
+	return fd.proto.GetName()
+}
+
+// GetFullyQualifiedName returns the name of the file, same as GetName. It is
+// present to satisfy the Descriptor interface.
+func (fd *FileDescriptor) GetFullyQualifiedName() string {
+	return fd.proto.GetName()
+}
+
+// GetPackage returns the name of the package declared in the file.
+func (fd *FileDescriptor) GetPackage() string {
+	return fd.proto.GetPackage()
+}
+
+// GetParent always returns nil: files are the root of descriptor hierarchies.
+// It is present to satisfy the Descriptor interface.
+func (fd *FileDescriptor) GetParent() Descriptor {
+	return nil
+}
+
+// GetFile returns the receiver, which is a file descriptor. This is present
+// to satisfy the Descriptor interface.
+func (fd *FileDescriptor) GetFile() *FileDescriptor {
+	return fd
+}
+
+// GetOptions returns the file's options. Most usages will be more interested
+// in GetFileOptions, which has a concrete return type. This generic version
+// is present to satisfy the Descriptor interface.
+func (fd *FileDescriptor) GetOptions() proto.Message {
+	return fd.proto.GetOptions()
+}
+
+// GetFileOptions returns the file's options.
+func (fd *FileDescriptor) GetFileOptions() *dpb.FileOptions {
+	return fd.proto.GetOptions()
+}
+
+// GetSourceInfo returns nil for files. It is present to satisfy the Descriptor
+// interface.
+func (fd *FileDescriptor) GetSourceInfo() *dpb.SourceCodeInfo_Location {
+	return nil
+}
+
+// AsProto returns the underlying descriptor proto. Most usages will be more
+// interested in AsFileDescriptorProto, which has a concrete return type. This
+// generic version is present to satisfy the Descriptor interface.
+func (fd *FileDescriptor) AsProto() proto.Message {
+	return fd.proto
+}
+
+// AsFileDescriptorProto returns the underlying descriptor proto.
+func (fd *FileDescriptor) AsFileDescriptorProto() *dpb.FileDescriptorProto {
+	return fd.proto
+}
+
+// String returns the underlying descriptor proto, in compact text format.
+func (fd *FileDescriptor) String() string {
+	return fd.proto.String()
+}
+
+// IsProto3 returns true if the file declares a syntax of "proto3".
+func (fd *FileDescriptor) IsProto3() bool {
+	return fd.isProto3
+}
+
+// GetDependencies returns all of this file's dependencies. These correspond to
+// import statements in the file.
+func (fd *FileDescriptor) GetDependencies() []*FileDescriptor {
+	return fd.deps
+}
+
+// GetPublicDependencies returns all of this file's public dependencies. These
+// correspond to public import statements in the file.
+func (fd *FileDescriptor) GetPublicDependencies() []*FileDescriptor {
+	return fd.publicDeps
+}
+
+// GetWeakDependencies returns all of this file's weak dependencies. These
+// correspond to weak import statements in the file.
+func (fd *FileDescriptor) GetWeakDependencies() []*FileDescriptor {
+	return fd.weakDeps
+}
+
+// GetMessageTypes returns all top-level messages declared in this file.
+func (fd *FileDescriptor) GetMessageTypes() []*MessageDescriptor {
+	return fd.messages
+}
+
+// GetEnumTypes returns all top-level enums declared in this file.
+func (fd *FileDescriptor) GetEnumTypes() []*EnumDescriptor {
+	return fd.enums
+}
+
+// GetExtensions returns all top-level extensions declared in this file.
+func (fd *FileDescriptor) GetExtensions() []*FieldDescriptor {
+	return fd.extensions
+}
+
+// GetServices returns all services declared in this file.
+func (fd *FileDescriptor) GetServices() []*ServiceDescriptor {
+	return fd.services
+}
+
+// FindSymbol returns the descriptor contained within this file for the
+// element with the given fully-qualified symbol name. If no such element
+// exists then this method returns nil.
+func (fd *FileDescriptor) FindSymbol(symbol string) Descriptor {
+	if symbol[0] == '.' {
+		symbol = symbol[1:]
+	}
+	return fd.symbols[symbol]
+}
+
+// FindMessage finds the message with the given fully-qualified name. If no
+// such element exists in this file then nil is returned.
+func (fd *FileDescriptor) FindMessage(msgName string) *MessageDescriptor {
+	if md, ok := fd.symbols[msgName].(*MessageDescriptor); ok {
+		return md
+	} else {
+		return nil
+	}
+}
+
+// FindEnum finds the enum with the given fully-qualified name. If no such
+// element exists in this file then nil is returned.
+func (fd *FileDescriptor) FindEnum(enumName string) *EnumDescriptor {
+	if ed, ok := fd.symbols[enumName].(*EnumDescriptor); ok {
+		return ed
+	} else {
+		return nil
+	}
+}
+
+// FindService finds the service with the given fully-qualified name. If no
+// such element exists in this file then nil is returned.
+func (fd *FileDescriptor) FindService(serviceName string) *ServiceDescriptor {
+	if sd, ok := fd.symbols[serviceName].(*ServiceDescriptor); ok {
+		return sd
+	} else {
+		return nil
+	}
+}
+
+// FindExtension finds the extension field for the given extended type name and
+// tag number. If no such element exists in this file then nil is returned.
+func (fd *FileDescriptor) FindExtension(extendeeName string, tagNumber int32) *FieldDescriptor {
+	if exd, ok := fd.fieldIndex[extendeeName][tagNumber]; ok && exd.IsExtension() {
+		return exd
+	} else {
+		return nil
+	}
+}
+
+// FindExtensionByName finds the extension field with the given fully-qualified
+// name. If no such element exists in this file then nil is returned.
+func (fd *FileDescriptor) FindExtensionByName(extName string) *FieldDescriptor {
+	if exd, ok := fd.symbols[extName].(*FieldDescriptor); ok && exd.IsExtension() {
+		return exd
+	} else {
+		return nil
+	}
+}
+
+// MessageDescriptor describes a protocol buffer message.
+type MessageDescriptor struct {
+	proto          *dpb.DescriptorProto
+	parent         Descriptor
+	file           *FileDescriptor
+	fields         []*FieldDescriptor
+	nested         []*MessageDescriptor
+	enums          []*EnumDescriptor
+	extensions     []*FieldDescriptor
+	oneOfs         []*OneOfDescriptor
+	extRanges      extRanges
+	fqn            string
+	sourceInfoPath []int32
+	jsonNames      jsonNameMap
+	isProto3       bool
+	isMapEntry     bool
+}
+
+func createMessageDescriptor(fd *FileDescriptor, parent Descriptor, enclosing string, md *dpb.DescriptorProto, symbols map[string]Descriptor) (*MessageDescriptor, string) {
+	msgName := merge(enclosing, md.GetName())
+	ret := &MessageDescriptor{proto: md, parent: parent, file: fd, fqn: msgName}
+	for _, f := range md.GetField() {
+		fld, n := createFieldDescriptor(fd, ret, msgName, f)
+		symbols[n] = fld
+		ret.fields = append(ret.fields, fld)
+	}
+	for _, nm := range md.NestedType {
+		nmd, n := createMessageDescriptor(fd, ret, msgName, nm, symbols)
+		symbols[n] = nmd
+		ret.nested = append(ret.nested, nmd)
+	}
+	for _, e := range md.EnumType {
+		ed, n := createEnumDescriptor(fd, ret, msgName, e, symbols)
+		symbols[n] = ed
+		ret.enums = append(ret.enums, ed)
+	}
+	for _, ex := range md.GetExtension() {
+		exd, n := createFieldDescriptor(fd, ret, msgName, ex)
+		symbols[n] = exd
+		ret.extensions = append(ret.extensions, exd)
+	}
+	for i, o := range md.GetOneofDecl() {
+		od, n := createOneOfDescriptor(fd, ret, i, msgName, o)
+		symbols[n] = od
+		ret.oneOfs = append(ret.oneOfs, od)
+	}
+	for _, r := range md.GetExtensionRange() {
+		// proto.ExtensionRange is inclusive (and that's how extension ranges are defined in code).
+		// but protoc converts range to exclusive end in descriptor, so we must convert back
+		end := r.GetEnd() - 1
+		ret.extRanges = append(ret.extRanges, proto.ExtensionRange{
+			Start: r.GetStart(),
+			End:   end})
+	}
+	sort.Sort(ret.extRanges)
+	ret.isProto3 = fd.isProto3
+	ret.isMapEntry = md.GetOptions().GetMapEntry() &&
+		len(ret.fields) == 2 &&
+		ret.fields[0].GetNumber() == 1 &&
+		ret.fields[1].GetNumber() == 2
+
+	return ret, msgName
+}
+
+func (md *MessageDescriptor) resolve(path []int32, scopes []scope) error {
+	md.sourceInfoPath = append([]int32(nil), path...) // defensive copy
+	path = append(path, internal.Message_nestedMessagesTag)
+	scopes = append(scopes, messageScope(md))
+	for i, nmd := range md.nested {
+		if err := nmd.resolve(append(path, int32(i)), scopes); err != nil {
+			return err
+		}
+	}
+	path[len(path)-1] = internal.Message_enumsTag
+	for i, ed := range md.enums {
+		ed.resolve(append(path, int32(i)))
+	}
+	path[len(path)-1] = internal.Message_fieldsTag
+	for i, fld := range md.fields {
+		if err := fld.resolve(append(path, int32(i)), scopes); err != nil {
+			return err
+		}
+	}
+	path[len(path)-1] = internal.Message_extensionsTag
+	for i, exd := range md.extensions {
+		if err := exd.resolve(append(path, int32(i)), scopes); err != nil {
+			return err
+		}
+	}
+	path[len(path)-1] = internal.Message_oneOfsTag
+	for i, od := range md.oneOfs {
+		od.resolve(append(path, int32(i)))
+	}
+	return nil
+}
+
+// GetName returns the simple (unqualified) name of the message.
+func (md *MessageDescriptor) GetName() string {
+	return md.proto.GetName()
+}
+
+// GetFullyQualifiedName returns the fully qualified name of the message. This
+// includes the package name (if there is one) as well as the names of any
+// enclosing messages.
+func (md *MessageDescriptor) GetFullyQualifiedName() string {
+	return md.fqn
+}
+
+// GetParent returns the message's enclosing descriptor. For top-level messages,
+// this will be a file descriptor. Otherwise it will be the descriptor for the
+// enclosing message.
+func (md *MessageDescriptor) GetParent() Descriptor {
+	return md.parent
+}
+
+// GetFile returns the descriptor for the file in which this message is defined.
+func (md *MessageDescriptor) GetFile() *FileDescriptor {
+	return md.file
+}
+
+// GetOptions returns the message's options. Most usages will be more interested
+// in GetMessageOptions, which has a concrete return type. This generic version
+// is present to satisfy the Descriptor interface.
+func (md *MessageDescriptor) GetOptions() proto.Message {
+	return md.proto.GetOptions()
+}
+
+// GetMessageOptions returns the message's options.
+func (md *MessageDescriptor) GetMessageOptions() *dpb.MessageOptions {
+	return md.proto.GetOptions()
+}
+
+// GetSourceInfo returns source info for the message, if present in the
+// descriptor. Not all descriptors will contain source info. If non-nil, the
+// returned info contains information about the location in the file where the
+// message was defined and also contains comments associated with the message
+// definition.
+func (md *MessageDescriptor) GetSourceInfo() *dpb.SourceCodeInfo_Location {
+	return md.file.sourceInfo.Get(md.sourceInfoPath)
+}
+
+// AsProto returns the underlying descriptor proto. Most usages will be more
+// interested in AsDescriptorProto, which has a concrete return type. This
+// generic version is present to satisfy the Descriptor interface.
+func (md *MessageDescriptor) AsProto() proto.Message {
+	return md.proto
+}
+
+// AsDescriptorProto returns the underlying descriptor proto.
+func (md *MessageDescriptor) AsDescriptorProto() *dpb.DescriptorProto {
+	return md.proto
+}
+
+// String returns the underlying descriptor proto, in compact text format.
+func (md *MessageDescriptor) String() string {
+	return md.proto.String()
+}
+
+// IsMapEntry returns true if this is a synthetic message type that represents an entry
+// in a map field.
+func (md *MessageDescriptor) IsMapEntry() bool {
+	return md.isMapEntry
+}
+
+// GetFields returns all of the fields for this message.
+func (md *MessageDescriptor) GetFields() []*FieldDescriptor {
+	return md.fields
+}
+
+// GetNestedMessageTypes returns all of the message types declared inside this message.
+func (md *MessageDescriptor) GetNestedMessageTypes() []*MessageDescriptor {
+	return md.nested
+}
+
+// GetNestedEnumTypes returns all of the enums declared inside this message.
+func (md *MessageDescriptor) GetNestedEnumTypes() []*EnumDescriptor {
+	return md.enums
+}
+
+// GetNestedExtensions returns all of the extensions declared inside this message.
+func (md *MessageDescriptor) GetNestedExtensions() []*FieldDescriptor {
+	return md.extensions
+}
+
+// GetOneOfs returns all of the one-of field sets declared inside this message.
+func (md *MessageDescriptor) GetOneOfs() []*OneOfDescriptor {
+	return md.oneOfs
+}
+
+// IsProto3 returns true if the file in which this message is defined declares a syntax of "proto3".
+func (md *MessageDescriptor) IsProto3() bool {
+	return md.isProto3
+}
+
+// GetExtensionRanges returns the ranges of extension field numbers for this message.
+func (md *MessageDescriptor) GetExtensionRanges() []proto.ExtensionRange {
+	return md.extRanges
+}
+
+// IsExtendable returns true if this message has any extension ranges.
+func (md *MessageDescriptor) IsExtendable() bool {
+	return len(md.extRanges) > 0
+}
+
+// IsExtension returns true if the given tag number is within any of this message's
+// extension ranges.
+func (md *MessageDescriptor) IsExtension(tagNumber int32) bool {
+	return md.extRanges.IsExtension(tagNumber)
+}
+
+type extRanges []proto.ExtensionRange
+
+func (er extRanges) String() string {
+	var buf bytes.Buffer
+	first := true
+	for _, r := range er {
+		if first {
+			first = false
+		} else {
+			buf.WriteString(",")
+		}
+		fmt.Fprintf(&buf, "%d..%d", r.Start, r.End)
+	}
+	return buf.String()
+}
+
+func (er extRanges) IsExtension(tagNumber int32) bool {
+	i := sort.Search(len(er), func(i int) bool { return er[i].End >= tagNumber })
+	return i < len(er) && tagNumber >= er[i].Start
+}
+
+func (er extRanges) Len() int {
+	return len(er)
+}
+
+func (er extRanges) Less(i, j int) bool {
+	return er[i].Start < er[j].Start
+}
+
+func (er extRanges) Swap(i, j int) {
+	er[i], er[j] = er[j], er[i]
+}
+
+// FindFieldByName finds the field with the given name. If no such field exists
+// then nil is returned. Only regular fields are returned, not extensions.
+func (md *MessageDescriptor) FindFieldByName(fieldName string) *FieldDescriptor {
+	fqn := fmt.Sprintf("%s.%s", md.fqn, fieldName)
+	if fd, ok := md.file.symbols[fqn].(*FieldDescriptor); ok && !fd.IsExtension() {
+		return fd
+	} else {
+		return nil
+	}
+}
+
+// FindFieldByNumber finds the field with the given tag number. If no such field
+// exists then nil is returned. Only regular fields are returned, not extensions.
+func (md *MessageDescriptor) FindFieldByNumber(tagNumber int32) *FieldDescriptor {
+	if fd, ok := md.file.fieldIndex[md.fqn][tagNumber]; ok && !fd.IsExtension() {
+		return fd
+	} else {
+		return nil
+	}
+}
+
+// FieldDescriptor describes a field of a protocol buffer message.
+type FieldDescriptor struct {
+	proto          *dpb.FieldDescriptorProto
+	parent         Descriptor
+	owner          *MessageDescriptor
+	file           *FileDescriptor
+	oneOf          *OneOfDescriptor
+	msgType        *MessageDescriptor
+	enumType       *EnumDescriptor
+	fqn            string
+	sourceInfoPath []int32
+	def            memoizedDefault
+	isMap          bool
+}
+
+func createFieldDescriptor(fd *FileDescriptor, parent Descriptor, enclosing string, fld *dpb.FieldDescriptorProto) (*FieldDescriptor, string) {
+	fldName := merge(enclosing, fld.GetName())
+	ret := &FieldDescriptor{proto: fld, parent: parent, file: fd, fqn: fldName}
+	if fld.GetExtendee() == "" {
+		ret.owner = parent.(*MessageDescriptor)
+	}
+	// owner for extensions, field type (be it message or enum), and one-ofs get resolved later
+	return ret, fldName
+}
+
+func (fd *FieldDescriptor) resolve(path []int32, scopes []scope) error {
+	if fd.proto.OneofIndex != nil && fd.oneOf == nil {
+		return fmt.Errorf("could not link field %s to one-of index %d", fd.fqn, *fd.proto.OneofIndex)
+	}
+	fd.sourceInfoPath = append([]int32(nil), path...) // defensive copy
+	if fd.proto.GetType() == dpb.FieldDescriptorProto_TYPE_ENUM {
+		if desc, err := resolve(fd.file, fd.proto.GetTypeName(), scopes); err != nil {
+			return err
+		} else {
+			fd.enumType = desc.(*EnumDescriptor)
+		}
+	}
+	if fd.proto.GetType() == dpb.FieldDescriptorProto_TYPE_MESSAGE || fd.proto.GetType() == dpb.FieldDescriptorProto_TYPE_GROUP {
+		if desc, err := resolve(fd.file, fd.proto.GetTypeName(), scopes); err != nil {
+			return err
+		} else {
+			fd.msgType = desc.(*MessageDescriptor)
+		}
+	}
+	if fd.proto.GetExtendee() != "" {
+		if desc, err := resolve(fd.file, fd.proto.GetExtendee(), scopes); err != nil {
+			return err
+		} else {
+			fd.owner = desc.(*MessageDescriptor)
+		}
+	}
+	fd.file.registerField(fd)
+	fd.isMap = fd.proto.GetLabel() == dpb.FieldDescriptorProto_LABEL_REPEATED &&
+		fd.proto.GetType() == dpb.FieldDescriptorProto_TYPE_MESSAGE &&
+		fd.GetMessageType().IsMapEntry()
+	return nil
+}
+
+func (fd *FieldDescriptor) determineDefault() interface{} {
+	if fd.IsMap() {
+		return map[interface{}]interface{}(nil)
+	} else if fd.IsRepeated() {
+		return []interface{}(nil)
+	} else if fd.msgType != nil {
+		return nil
+	}
+
+	proto3 := fd.file.isProto3
+	if !proto3 {
+		def := fd.AsFieldDescriptorProto().GetDefaultValue()
+		if def != "" {
+			ret := parseDefaultValue(fd, def)
+			if ret != nil {
+				return ret
+			}
+			// if we can't parse default value, fall-through to return normal default...
+		}
+	}
+
+	switch fd.GetType() {
+	case dpb.FieldDescriptorProto_TYPE_FIXED32,
+		dpb.FieldDescriptorProto_TYPE_UINT32:
+		return uint32(0)
+	case dpb.FieldDescriptorProto_TYPE_SFIXED32,
+		dpb.FieldDescriptorProto_TYPE_INT32,
+		dpb.FieldDescriptorProto_TYPE_SINT32:
+		return int32(0)
+	case dpb.FieldDescriptorProto_TYPE_FIXED64,
+		dpb.FieldDescriptorProto_TYPE_UINT64:
+		return uint64(0)
+	case dpb.FieldDescriptorProto_TYPE_SFIXED64,
+		dpb.FieldDescriptorProto_TYPE_INT64,
+		dpb.FieldDescriptorProto_TYPE_SINT64:
+		return int64(0)
+	case dpb.FieldDescriptorProto_TYPE_FLOAT:
+		return float32(0.0)
+	case dpb.FieldDescriptorProto_TYPE_DOUBLE:
+		return float64(0.0)
+	case dpb.FieldDescriptorProto_TYPE_BOOL:
+		return false
+	case dpb.FieldDescriptorProto_TYPE_BYTES:
+		return []byte(nil)
+	case dpb.FieldDescriptorProto_TYPE_STRING:
+		return ""
+	case dpb.FieldDescriptorProto_TYPE_ENUM:
+		if proto3 {
+			return int32(0)
+		}
+		enumVals := fd.GetEnumType().GetValues()
+		if len(enumVals) > 0 {
+			return enumVals[0].GetNumber()
+		} else {
+			return int32(0) // enum declares no values; fall back to zero
+		}
+	default:
+		panic(fmt.Sprintf("Unknown field type: %v", fd.GetType()))
+	}
+}
+
+func parseDefaultValue(fd *FieldDescriptor, val string) interface{} {
+	switch fd.GetType() {
+	case dpb.FieldDescriptorProto_TYPE_ENUM:
+		vd := fd.GetEnumType().FindValueByName(val)
+		if vd != nil {
+			return vd.GetNumber()
+		}
+		return nil
+	case dpb.FieldDescriptorProto_TYPE_BOOL:
+		if val == "true" {
+			return true
+		} else if val == "false" {
+			return false
+		}
+		return nil
+	case dpb.FieldDescriptorProto_TYPE_BYTES:
+		return []byte(unescape(val))
+	case dpb.FieldDescriptorProto_TYPE_STRING:
+		return val
+	case dpb.FieldDescriptorProto_TYPE_FLOAT:
+		if f, err := strconv.ParseFloat(val, 32); err == nil {
+			return float32(f)
+		} else {
+			return float32(0)
+		}
+	case dpb.FieldDescriptorProto_TYPE_DOUBLE:
+		if f, err := strconv.ParseFloat(val, 64); err == nil {
+			return f
+		} else {
+			return float64(0)
+		}
+	case dpb.FieldDescriptorProto_TYPE_INT32,
+		dpb.FieldDescriptorProto_TYPE_SINT32,
+		dpb.FieldDescriptorProto_TYPE_SFIXED32:
+		if i, err := strconv.ParseInt(val, 10, 32); err == nil {
+			return int32(i)
+		} else {
+			return int32(0)
+		}
+	case dpb.FieldDescriptorProto_TYPE_UINT32,
+		dpb.FieldDescriptorProto_TYPE_FIXED32:
+		if i, err := strconv.ParseUint(val, 10, 32); err == nil {
+			return uint32(i)
+		} else {
+			return uint32(0)
+		}
+	case dpb.FieldDescriptorProto_TYPE_INT64,
+		dpb.FieldDescriptorProto_TYPE_SINT64,
+		dpb.FieldDescriptorProto_TYPE_SFIXED64:
+		if i, err := strconv.ParseInt(val, 10, 64); err == nil {
+			return i
+		} else {
+			return int64(0)
+		}
+	case dpb.FieldDescriptorProto_TYPE_UINT64,
+		dpb.FieldDescriptorProto_TYPE_FIXED64:
+		if i, err := strconv.ParseUint(val, 10, 64); err == nil {
+			return i
+		} else {
+			return uint64(0)
+		}
+	default:
+		return nil
+	}
+}
+
// unescape reverses the C-style escaping that protoc applies when it encodes
// a default value for a 'bytes' field. It handles hex (\x..), octal (\ooo),
// short (\uXXXX) and long (\UXXXXXXXX) unicode escapes, plus the standard
// single-character escapes. Malformed escape sequences are copied through to
// the output verbatim instead of being dropped.
func unescape(s string) string {
	// protoc encodes default values for 'bytes' fields using C escaping,
	// so this function reverses that escaping
	out := make([]byte, 0, len(s))
	var buf [4]byte // scratch buffer for UTF-8 encoding of \u and \U escapes
	for len(s) > 0 {
		if s[0] != '\\' || len(s) < 2 {
			// not escape sequence, or too short to be well-formed escape
			out = append(out, s[0])
			s = s[1:]
		} else if s[1] == 'x' || s[1] == 'X' {
			// hex escape: \x (or \X) followed by up to 2 hex digits
			n := matchPrefix(s[2:], 2, isHex)
			if n == 0 {
				// bad escape
				out = append(out, s[:2]...)
				s = s[2:]
			} else {
				c, err := strconv.ParseUint(s[2:2+n], 16, 8)
				if err != nil {
					// shouldn't really happen...
					out = append(out, s[:2+n]...)
				} else {
					out = append(out, byte(c))
				}
				s = s[2+n:]
			}
		} else if s[1] >= '0' && s[1] <= '7' {
			// octal escape: \ followed by up to 3 octal digits
			n := 1 + matchPrefix(s[2:], 2, isOctal)
			c, err := strconv.ParseUint(s[1:1+n], 8, 8)
			if err != nil || c > 0xff {
				// bad escape: value does not fit in a byte
				out = append(out, s[:1+n]...)
			} else {
				out = append(out, byte(c))
			}
			s = s[1+n:]
		} else if s[1] == 'u' {
			// short unicode escape: \u followed by exactly 4 hex digits
			if len(s) < 6 {
				// bad escape
				out = append(out, s...)
				s = s[len(s):]
			} else {
				c, err := strconv.ParseUint(s[2:6], 16, 16)
				if err != nil {
					// bad escape
					out = append(out, s[:6]...)
				} else {
					w := utf8.EncodeRune(buf[:], rune(c))
					out = append(out, buf[:w]...)
				}
				s = s[6:]
			}
		} else if s[1] == 'U' {
			// long unicode escape: \U followed by exactly 8 hex digits
			if len(s) < 10 {
				// bad escape
				out = append(out, s...)
				s = s[len(s):]
			} else {
				c, err := strconv.ParseUint(s[2:10], 16, 32)
				if err != nil || c > 0x10ffff {
					// bad escape: not a legal unicode code point
					out = append(out, s[:10]...)
				} else {
					w := utf8.EncodeRune(buf[:], rune(c))
					out = append(out, buf[:w]...)
				}
				s = s[10:]
			}
		} else {
			// standard single-character escapes
			switch s[1] {
			case 'a':
				out = append(out, '\a')
			case 'b':
				out = append(out, '\b')
			case 'f':
				out = append(out, '\f')
			case 'n':
				out = append(out, '\n')
			case 'r':
				out = append(out, '\r')
			case 't':
				out = append(out, '\t')
			case 'v':
				out = append(out, '\v')
			case '\\':
				out = append(out, '\\')
			case '\'':
				out = append(out, '\'')
			case '"':
				out = append(out, '"')
			case '?':
				out = append(out, '?')
			default:
				// invalid escape, just copy it as-is
				out = append(out, s[:2]...)
			}
			s = s[2:]
		}
	}
	return string(out)
}
+
// isOctal reports whether b is an octal digit ('0' through '7').
func isOctal(b byte) bool { return '0' <= b && b <= '7' }
// isHex reports whether b is a hexadecimal digit (either case).
func isHex(b byte) bool {
	switch {
	case '0' <= b && b <= '9', 'a' <= b && b <= 'f', 'A' <= b && b <= 'F':
		return true
	}
	return false
}
// matchPrefix returns the length of the longest prefix of s, at most limit
// bytes long, in which every byte satisfies fn.
func matchPrefix(s string, limit int, fn func(byte) bool) int {
	if limit > len(s) {
		limit = len(s)
	}
	for i := 0; i < limit; i++ {
		if !fn(s[i]) {
			return i
		}
	}
	return limit
}
+
// GetName returns the simple (unqualified) name of the field, as recorded in
// the underlying descriptor proto.
func (fd *FieldDescriptor) GetName() string {
	return fd.proto.GetName()
}
+
// GetNumber returns the tag number of this field, as recorded in the
// underlying descriptor proto.
func (fd *FieldDescriptor) GetNumber() int32 {
	return fd.proto.GetNumber()
}
+
// GetFullyQualifiedName returns the fully qualified name of the field. Unlike
// GetName, this includes the fully qualified name of the enclosing message for
// regular fields.
//
// For extension fields, this includes the package (if there is one) as well as
// any enclosing messages. The package and/or enclosing messages are for where
// the extension is defined, not the message it extends.
//
// If this field is part of a one-of, the fully qualified name does *not*
// include the name of the one-of, only of the enclosing message.
func (fd *FieldDescriptor) GetFullyQualifiedName() string {
	return fd.fqn // computed once when the descriptor was created
}
+
// GetParent returns the field's enclosing descriptor. For normal
// (non-extension) fields, this is the enclosing message. For extensions, this
// is the descriptor in which the extension is defined, not the message that is
// extended. The parent for an extension may be a file descriptor or a message,
// depending on where the extension is defined.
func (fd *FieldDescriptor) GetParent() Descriptor {
	return fd.parent
}
+
// GetFile returns the descriptor for the file in which this field is defined.
func (fd *FieldDescriptor) GetFile() *FileDescriptor {
	return fd.file // set when the descriptor was created
}
+
// GetOptions returns the field's options, straight from the underlying
// descriptor proto. Most usages will be more interested in GetFieldOptions,
// which has a concrete return type. This generic version is present to
// satisfy the Descriptor interface.
func (fd *FieldDescriptor) GetOptions() proto.Message {
	return fd.proto.GetOptions()
}
+
// GetFieldOptions returns the field's options, straight from the underlying
// descriptor proto.
func (fd *FieldDescriptor) GetFieldOptions() *dpb.FieldOptions {
	return fd.proto.GetOptions()
}
+
// GetSourceInfo returns source info for the field, if present in the
// descriptor. Not all descriptors will contain source info. If non-nil, the
// returned info contains information about the location in the file where the
// field was defined and also contains comments associated with the field
// definition.
func (fd *FieldDescriptor) GetSourceInfo() *dpb.SourceCodeInfo_Location {
	// look up by the path captured during resolve
	return fd.file.sourceInfo.Get(fd.sourceInfoPath)
}
+
// AsProto returns the underlying descriptor proto. Most usages will be more
// interested in AsFieldDescriptorProto, which has a concrete return type. This
// generic version is present to satisfy the Descriptor interface.
func (fd *FieldDescriptor) AsProto() proto.Message {
	return fd.proto // same value as AsFieldDescriptorProto
}
+
// AsFieldDescriptorProto returns the underlying descriptor proto, with its
// concrete type.
func (fd *FieldDescriptor) AsFieldDescriptorProto() *dpb.FieldDescriptorProto {
	return fd.proto
}
+
// String returns the underlying descriptor proto, in compact text format.
func (fd *FieldDescriptor) String() string {
	return fd.proto.String() // delegates to the proto's own String method
}
+
+// GetJSONName returns the name of the field as referenced in the message's JSON
+// format.
+func (fd *FieldDescriptor) GetJSONName() string {
+	if jsonName := fd.proto.GetJsonName(); jsonName != "" {
+		return jsonName
+	}
+	return fd.proto.GetName()
+}
+
+// GetFullyQualifiedJSONName returns the JSON format name (same as GetJSONName),
+// but includes the fully qualified name of the enclosing message.
+//
+// If the field is an extension, it will return the package name (if there is
+// one) as well as the names of any enclosing messages. The package and/or
+// enclosing messages are for where the extension is defined, not the message it
+// extends.
+func (fd *FieldDescriptor) GetFullyQualifiedJSONName() string {
+	parent := fd.GetParent()
+	switch parent := parent.(type) {
+	case *FileDescriptor:
+		pkg := parent.GetPackage()
+		if pkg == "" {
+			return fd.GetJSONName()
+		}
+		return fmt.Sprintf("%s.%s", pkg, fd.GetJSONName())
+	default:
+		return fmt.Sprintf("%s.%s", parent.GetFullyQualifiedName(), fd.GetJSONName())
+	}
+}
+
// GetOwner returns the message type that this field belongs to. If this is a normal
// field then this is the same as GetParent. But for extensions, this will be the
// extendee message whereas GetParent refers to where the extension was declared.
func (fd *FieldDescriptor) GetOwner() *MessageDescriptor {
	return fd.owner // resolved when the descriptor hierarchy was linked
}
+
+// IsExtension returns true if this is an extension field.
+func (fd *FieldDescriptor) IsExtension() bool {
+	return fd.proto.GetExtendee() != ""
+}
+
// GetOneOf returns the one-of field set to which this field belongs. If this field
// is not part of a one-of then this method returns nil.
func (fd *FieldDescriptor) GetOneOf() *OneOfDescriptor {
	return fd.oneOf // linked up in createOneOfDescriptor
}
+
// GetType returns the type of this field. If the type indicates an enum, the
// enum type can be queried via GetEnumType. If the type indicates a message, the
// message type can be queried via GetMessageType.
func (fd *FieldDescriptor) GetType() dpb.FieldDescriptorProto_Type {
	return fd.proto.GetType()
}
+
// GetLabel returns the label for this field. The label can be required
// (proto2-only), optional (the default for proto3), or repeated.
func (fd *FieldDescriptor) GetLabel() dpb.FieldDescriptorProto_Label {
	return fd.proto.GetLabel()
}
+
// IsRequired returns true if this field has the "required" label
// (only possible in proto2 files).
func (fd *FieldDescriptor) IsRequired() bool {
	return fd.proto.GetLabel() == dpb.FieldDescriptorProto_LABEL_REQUIRED
}
+
// IsRepeated returns true if this field has the "repeated" label.
// Note that map fields also report the "repeated" label; see IsMap.
func (fd *FieldDescriptor) IsRepeated() bool {
	return fd.proto.GetLabel() == dpb.FieldDescriptorProto_LABEL_REPEATED
}
+
// IsMap returns true if this is a map field. If so, it will have the "repeated"
// label and its type will be a message that represents a map entry. The map entry
// message will have exactly two fields: tag #1 is the key and tag #2 is the value.
func (fd *FieldDescriptor) IsMap() bool {
	return fd.isMap // computed when the descriptor was created
}
+
+// GetMapKeyType returns the type of the key field if this is a map field. If it is
+// not a map field, nil is returned.
+func (fd *FieldDescriptor) GetMapKeyType() *FieldDescriptor {
+	if fd.isMap {
+		return fd.msgType.FindFieldByNumber(int32(1))
+	}
+	return nil
+}
+
+// GetMapValueType returns the type of the value field if this is a map field. If it
+// is not a map field, nil is returned.
+func (fd *FieldDescriptor) GetMapValueType() *FieldDescriptor {
+	if fd.isMap {
+		return fd.msgType.FindFieldByNumber(int32(2))
+	}
+	return nil
+}
+
// GetMessageType returns the type of this field if it is a message type. If
// this field is not a message type, it returns nil.
func (fd *FieldDescriptor) GetMessageType() *MessageDescriptor {
	return fd.msgType // nil unless the field's type is a message
}
+
// GetEnumType returns the type of this field if it is an enum type. If this
// field is not an enum type, it returns nil.
func (fd *FieldDescriptor) GetEnumType() *EnumDescriptor {
	return fd.enumType // nil unless the field's type is an enum
}
+
// GetDefaultValue returns the default value for this field.
//
// If this field represents a message type, this method always returns nil (even though
// for proto2 files, the default value should be a default instance of the message type).
// If the field represents an enum type, this method returns an int32 corresponding to the
// enum value. If this field is a map, it returns a nil map[interface{}]interface{}. If
// this field is repeated (and not a map), it returns a nil []interface{}.
//
// Otherwise, it returns the declared default value for the field or a zero value, if no
// default is declared or if the file is proto3. The type of said return value corresponds
// to the type of the field:
//  +-------------------------+-----------+
//  |       Declared Type     |  Go Type  |
//  +-------------------------+-----------+
//  | int32, sint32, sfixed32 | int32     |
//  | int64, sint64, sfixed64 | int64     |
//  | uint32, fixed32         | uint32    |
//  | uint64, fixed64         | uint64    |
//  | float                   | float32   |
//  | double                  | float64   |
//  | bool                    | bool      |
//  | string                  | string    |
//  | bytes                   | []byte    |
//  +-------------------------+-----------+
func (fd *FieldDescriptor) GetDefaultValue() interface{} {
	return fd.getDefaultValue()
}
+
// EnumDescriptor describes an enum declared in a proto file.
type EnumDescriptor struct {
	proto          *dpb.EnumDescriptorProto
	parent         Descriptor      // the file or message in which the enum is declared
	file           *FileDescriptor // the file in which the enum is declared
	values         []*EnumValueDescriptor
	valuesByNum    sortedValues // values sorted by number, for binary search
	fqn            string       // fully-qualified name of the enum
	sourceInfoPath []int32      // path into the file's source code info
}
+
+func createEnumDescriptor(fd *FileDescriptor, parent Descriptor, enclosing string, ed *dpb.EnumDescriptorProto, symbols map[string]Descriptor) (*EnumDescriptor, string) {
+	enumName := merge(enclosing, ed.GetName())
+	ret := &EnumDescriptor{proto: ed, parent: parent, file: fd, fqn: enumName}
+	for _, ev := range ed.GetValue() {
+		evd, n := createEnumValueDescriptor(fd, ret, enumName, ev)
+		symbols[n] = evd
+		ret.values = append(ret.values, evd)
+	}
+	if len(ret.values) > 0 {
+		ret.valuesByNum = make(sortedValues, len(ret.values))
+		copy(ret.valuesByNum, ret.values)
+		sort.Stable(ret.valuesByNum)
+	}
+	return ret, enumName
+}
+
// sortedValues implements sort.Interface, ordering enum values by their
// numeric value.
type sortedValues []*EnumValueDescriptor
+
// Len implements sort.Interface.
func (sv sortedValues) Len() int {
	return len(sv)
}
+
// Less implements sort.Interface, comparing by numeric value.
func (sv sortedValues) Less(i, j int) bool {
	return sv[i].GetNumber() < sv[j].GetNumber()
}
+
// Swap implements sort.Interface.
func (sv sortedValues) Swap(i, j int) {
	sv[i], sv[j] = sv[j], sv[i]
}
+
// resolve records the enum's source-code-info path and then resolves each of
// the enum's values; each value's path is the enum's path plus the values tag
// and the value's index.
func (ed *EnumDescriptor) resolve(path []int32) {
	ed.sourceInfoPath = append([]int32(nil), path...) // defensive copy
	path = append(path, internal.Enum_valuesTag)
	for i, evd := range ed.values {
		// safe to share path's backing array: resolve copies it immediately
		evd.resolve(append(path, int32(i)))
	}
}
+
// GetName returns the simple (unqualified) name of the enum type, as recorded
// in the underlying descriptor proto.
func (ed *EnumDescriptor) GetName() string {
	return ed.proto.GetName()
}
+
// GetFullyQualifiedName returns the fully qualified name of the enum type.
// This includes the package name (if there is one) as well as the names of any
// enclosing messages.
func (ed *EnumDescriptor) GetFullyQualifiedName() string {
	return ed.fqn // computed once when the descriptor was created
}
+
// GetParent returns the enum type's enclosing descriptor. For top-level enums,
// this will be a file descriptor. Otherwise it will be the descriptor for the
// enclosing message.
func (ed *EnumDescriptor) GetParent() Descriptor {
	return ed.parent
}
+
// GetFile returns the descriptor for the file in which this enum is defined.
func (ed *EnumDescriptor) GetFile() *FileDescriptor {
	return ed.file // set when the descriptor was created
}
+
// GetOptions returns the enum type's options, straight from the underlying
// descriptor proto. Most usages will be more interested in GetEnumOptions,
// which has a concrete return type. This generic version is present to
// satisfy the Descriptor interface.
func (ed *EnumDescriptor) GetOptions() proto.Message {
	return ed.proto.GetOptions()
}
+
// GetEnumOptions returns the enum type's options, straight from the
// underlying descriptor proto.
func (ed *EnumDescriptor) GetEnumOptions() *dpb.EnumOptions {
	return ed.proto.GetOptions()
}
+
// GetSourceInfo returns source info for the enum type, if present in the
// descriptor. Not all descriptors will contain source info. If non-nil, the
// returned info contains information about the location in the file where the
// enum type was defined and also contains comments associated with the enum
// definition.
func (ed *EnumDescriptor) GetSourceInfo() *dpb.SourceCodeInfo_Location {
	// look up by the path captured during resolve
	return ed.file.sourceInfo.Get(ed.sourceInfoPath)
}
+
// AsProto returns the underlying descriptor proto. Most usages will be more
// interested in AsEnumDescriptorProto, which has a concrete return type. This
// generic version is present to satisfy the Descriptor interface.
func (ed *EnumDescriptor) AsProto() proto.Message {
	return ed.proto // same value as AsEnumDescriptorProto
}
+
// AsEnumDescriptorProto returns the underlying descriptor proto, with its
// concrete type.
func (ed *EnumDescriptor) AsEnumDescriptorProto() *dpb.EnumDescriptorProto {
	return ed.proto
}
+
// String returns the underlying descriptor proto, in compact text format.
func (ed *EnumDescriptor) String() string {
	return ed.proto.String() // delegates to the proto's own String method
}
+
// GetValues returns all of the allowed values defined for this enum, in
// declaration order.
func (ed *EnumDescriptor) GetValues() []*EnumValueDescriptor {
	return ed.values
}
+
+// FindValueByName finds the enum value with the given name. If no such value exists
+// then nil is returned.
+func (ed *EnumDescriptor) FindValueByName(name string) *EnumValueDescriptor {
+	fqn := fmt.Sprintf("%s.%s", ed.fqn, name)
+	if vd, ok := ed.file.symbols[fqn].(*EnumValueDescriptor); ok {
+		return vd
+	} else {
+		return nil
+	}
+}
+
+// FindValueByNumber finds the value with the given numeric value. If no such value
+// exists then nil is returned. If aliases are allowed and multiple values have the
+// given number, the first declared value is returned.
+func (ed *EnumDescriptor) FindValueByNumber(num int32) *EnumValueDescriptor {
+	index := sort.Search(len(ed.valuesByNum), func(i int) bool { return ed.valuesByNum[i].GetNumber() >= num })
+	if index < len(ed.valuesByNum) {
+		vd := ed.valuesByNum[index]
+		if vd.GetNumber() == num {
+			return vd
+		}
+	}
+	return nil
+}
+
// EnumValueDescriptor describes an allowed value of an enum declared in a proto file.
type EnumValueDescriptor struct {
	proto          *dpb.EnumValueDescriptorProto
	parent         *EnumDescriptor // the enum in which the value is declared
	file           *FileDescriptor // the file in which the value is declared
	fqn            string          // fully-qualified name of the value
	sourceInfoPath []int32         // path into the file's source code info
}
+
+func createEnumValueDescriptor(fd *FileDescriptor, parent *EnumDescriptor, enclosing string, evd *dpb.EnumValueDescriptorProto) (*EnumValueDescriptor, string) {
+	valName := merge(enclosing, evd.GetName())
+	return &EnumValueDescriptor{proto: evd, parent: parent, file: fd, fqn: valName}, valName
+}
+
// resolve records the enum value's source-code-info path.
func (vd *EnumValueDescriptor) resolve(path []int32) {
	vd.sourceInfoPath = append([]int32(nil), path...) // defensive copy
}
+
// GetName returns the simple (unqualified) name of the enum value, as
// recorded in the underlying descriptor proto.
func (vd *EnumValueDescriptor) GetName() string {
	return vd.proto.GetName()
}
+
// GetNumber returns the numeric value associated with this enum value, as
// recorded in the underlying descriptor proto.
func (vd *EnumValueDescriptor) GetNumber() int32 {
	return vd.proto.GetNumber()
}
+
// GetFullyQualifiedName returns the fully qualified name of the enum value.
// Unlike GetName, this includes the fully qualified name of the enclosing enum.
func (vd *EnumValueDescriptor) GetFullyQualifiedName() string {
	return vd.fqn // computed once when the descriptor was created
}
+
// GetParent returns the descriptor for the enum in which this enum value is
// defined. Most usages will prefer to use GetEnum, which has a concrete return
// type. This more generic method is present to satisfy the Descriptor interface.
func (vd *EnumValueDescriptor) GetParent() Descriptor {
	return vd.parent // same value as GetEnum
}
+
// GetEnum returns the enum in which this enum value is defined.
func (vd *EnumValueDescriptor) GetEnum() *EnumDescriptor {
	return vd.parent
}
+
// GetFile returns the descriptor for the file in which this enum value is
// defined.
func (vd *EnumValueDescriptor) GetFile() *FileDescriptor {
	return vd.file // set when the descriptor was created
}
+
// GetOptions returns the enum value's options, straight from the underlying
// descriptor proto. Most usages will be more interested in
// GetEnumValueOptions, which has a concrete return type. This generic version
// is present to satisfy the Descriptor interface.
func (vd *EnumValueDescriptor) GetOptions() proto.Message {
	return vd.proto.GetOptions()
}
+
// GetEnumValueOptions returns the enum value's options, straight from the
// underlying descriptor proto.
func (vd *EnumValueDescriptor) GetEnumValueOptions() *dpb.EnumValueOptions {
	return vd.proto.GetOptions()
}
+
// GetSourceInfo returns source info for the enum value, if present in the
// descriptor. Not all descriptors will contain source info. If non-nil, the
// returned info contains information about the location in the file where the
// enum value was defined and also contains comments associated with the enum
// value definition.
func (vd *EnumValueDescriptor) GetSourceInfo() *dpb.SourceCodeInfo_Location {
	// look up by the path captured during resolve
	return vd.file.sourceInfo.Get(vd.sourceInfoPath)
}
+
// AsProto returns the underlying descriptor proto. Most usages will be more
// interested in AsEnumValueDescriptorProto, which has a concrete return type.
// This generic version is present to satisfy the Descriptor interface.
func (vd *EnumValueDescriptor) AsProto() proto.Message {
	return vd.proto // same value as AsEnumValueDescriptorProto
}
+
// AsEnumValueDescriptorProto returns the underlying descriptor proto, with
// its concrete type.
func (vd *EnumValueDescriptor) AsEnumValueDescriptorProto() *dpb.EnumValueDescriptorProto {
	return vd.proto
}
+
// String returns the underlying descriptor proto, in compact text format.
func (vd *EnumValueDescriptor) String() string {
	return vd.proto.String() // delegates to the proto's own String method
}
+
// ServiceDescriptor describes an RPC service declared in a proto file.
type ServiceDescriptor struct {
	proto          *dpb.ServiceDescriptorProto
	file           *FileDescriptor // the file in which the service is declared
	methods        []*MethodDescriptor
	fqn            string  // fully-qualified name of the service
	sourceInfoPath []int32 // path into the file's source code info
}
+
+func createServiceDescriptor(fd *FileDescriptor, enclosing string, sd *dpb.ServiceDescriptorProto, symbols map[string]Descriptor) (*ServiceDescriptor, string) {
+	serviceName := merge(enclosing, sd.GetName())
+	ret := &ServiceDescriptor{proto: sd, file: fd, fqn: serviceName}
+	for _, m := range sd.GetMethod() {
+		md, n := createMethodDescriptor(fd, ret, serviceName, m)
+		symbols[n] = md
+		ret.methods = append(ret.methods, md)
+	}
+	return ret, serviceName
+}
+
// resolve records the service's source-code-info path and then resolves each
// of the service's methods (linking their request/response types); each
// method's path is the service's path plus the methods tag and the method's
// index. The first method that fails to resolve aborts the whole call.
func (sd *ServiceDescriptor) resolve(path []int32, scopes []scope) error {
	sd.sourceInfoPath = append([]int32(nil), path...) // defensive copy
	path = append(path, internal.Service_methodsTag)
	for i, md := range sd.methods {
		// safe to share path's backing array: resolve copies it immediately
		if err := md.resolve(append(path, int32(i)), scopes); err != nil {
			return err
		}
	}
	return nil
}
+
// GetName returns the simple (unqualified) name of the service, as recorded
// in the underlying descriptor proto.
func (sd *ServiceDescriptor) GetName() string {
	return sd.proto.GetName()
}
+
// GetFullyQualifiedName returns the fully qualified name of the service. This
// includes the package name (if there is one).
func (sd *ServiceDescriptor) GetFullyQualifiedName() string {
	return sd.fqn // computed once when the descriptor was created
}
+
// GetParent returns the descriptor for the file in which this service is
// defined. Most usages will prefer to use GetFile, which has a concrete return
// type. This more generic method is present to satisfy the Descriptor interface.
func (sd *ServiceDescriptor) GetParent() Descriptor {
	return sd.file // services are always declared at file scope
}
+
// GetFile returns the descriptor for the file in which this service is defined.
func (sd *ServiceDescriptor) GetFile() *FileDescriptor {
	return sd.file // same value as GetParent
}
+
// GetOptions returns the service's options, straight from the underlying
// descriptor proto. Most usages will be more interested in
// GetServiceOptions, which has a concrete return type. This generic version
// is present to satisfy the Descriptor interface.
func (sd *ServiceDescriptor) GetOptions() proto.Message {
	return sd.proto.GetOptions()
}
+
// GetServiceOptions returns the service's options, straight from the
// underlying descriptor proto.
func (sd *ServiceDescriptor) GetServiceOptions() *dpb.ServiceOptions {
	return sd.proto.GetOptions()
}
+
// GetSourceInfo returns source info for the service, if present in the
// descriptor. Not all descriptors will contain source info. If non-nil, the
// returned info contains information about the location in the file where the
// service was defined and also contains comments associated with the service
// definition.
func (sd *ServiceDescriptor) GetSourceInfo() *dpb.SourceCodeInfo_Location {
	// look up by the path captured during resolve
	return sd.file.sourceInfo.Get(sd.sourceInfoPath)
}
+
// AsProto returns the underlying descriptor proto. Most usages will be more
// interested in AsServiceDescriptorProto, which has a concrete return type.
// This generic version is present to satisfy the Descriptor interface.
func (sd *ServiceDescriptor) AsProto() proto.Message {
	return sd.proto // same value as AsServiceDescriptorProto
}
+
// AsServiceDescriptorProto returns the underlying descriptor proto, with its
// concrete type.
func (sd *ServiceDescriptor) AsServiceDescriptorProto() *dpb.ServiceDescriptorProto {
	return sd.proto
}
+
// String returns the underlying descriptor proto, in compact text format.
func (sd *ServiceDescriptor) String() string {
	return sd.proto.String() // delegates to the proto's own String method
}
+
// GetMethods returns all of the RPC methods for this service, in declaration
// order.
func (sd *ServiceDescriptor) GetMethods() []*MethodDescriptor {
	return sd.methods
}
+
+// FindMethodByName finds the method with the given name. If no such method exists
+// then nil is returned.
+func (sd *ServiceDescriptor) FindMethodByName(name string) *MethodDescriptor {
+	fqn := fmt.Sprintf("%s.%s", sd.fqn, name)
+	if md, ok := sd.file.symbols[fqn].(*MethodDescriptor); ok {
+		return md
+	} else {
+		return nil
+	}
+}
+
// MethodDescriptor describes an RPC method declared in a proto file.
type MethodDescriptor struct {
	proto          *dpb.MethodDescriptorProto
	parent         *ServiceDescriptor // the service in which the method is declared
	file           *FileDescriptor    // the file in which the method is declared
	inType         *MessageDescriptor // request type; populated during resolve
	outType        *MessageDescriptor // response type; populated during resolve
	fqn            string             // fully-qualified name of the method
	sourceInfoPath []int32            // path into the file's source code info
}
+
+func createMethodDescriptor(fd *FileDescriptor, parent *ServiceDescriptor, enclosing string, md *dpb.MethodDescriptorProto) (*MethodDescriptor, string) {
+	// request and response types get resolved later
+	methodName := merge(enclosing, md.GetName())
+	return &MethodDescriptor{proto: md, parent: parent, file: fd, fqn: methodName}, methodName
+}
+
+func (md *MethodDescriptor) resolve(path []int32, scopes []scope) error {
+	md.sourceInfoPath = append([]int32(nil), path...) // defensive copy
+	if desc, err := resolve(md.file, md.proto.GetInputType(), scopes); err != nil {
+		return err
+	} else {
+		md.inType = desc.(*MessageDescriptor)
+	}
+	if desc, err := resolve(md.file, md.proto.GetOutputType(), scopes); err != nil {
+		return err
+	} else {
+		md.outType = desc.(*MessageDescriptor)
+	}
+	return nil
+}
+
// GetName returns the simple (unqualified) name of the method, as recorded in
// the underlying descriptor proto.
func (md *MethodDescriptor) GetName() string {
	return md.proto.GetName()
}
+
// GetFullyQualifiedName returns the fully qualified name of the method. Unlike
// GetName, this includes the fully qualified name of the enclosing service.
func (md *MethodDescriptor) GetFullyQualifiedName() string {
	return md.fqn // computed once when the descriptor was created
}
+
// GetParent returns the descriptor for the service in which this method is
// defined. Most usages will prefer to use GetService, which has a concrete
// return type. This more generic method is present to satisfy the Descriptor
// interface.
func (md *MethodDescriptor) GetParent() Descriptor {
	return md.parent // same value as GetService
}
+
// GetService returns the RPC service in which this method is declared.
func (md *MethodDescriptor) GetService() *ServiceDescriptor {
	return md.parent
}
+
// GetFile returns the descriptor for the file in which this method is defined.
func (md *MethodDescriptor) GetFile() *FileDescriptor {
	return md.file // set when the descriptor was created
}
+
// GetOptions returns the method's options, straight from the underlying
// descriptor proto. Most usages will be more interested in GetMethodOptions,
// which has a concrete return type. This generic version is present to
// satisfy the Descriptor interface.
func (md *MethodDescriptor) GetOptions() proto.Message {
	return md.proto.GetOptions()
}
+
// GetMethodOptions returns the method's options, straight from the underlying
// descriptor proto.
func (md *MethodDescriptor) GetMethodOptions() *dpb.MethodOptions {
	return md.proto.GetOptions()
}
+
// GetSourceInfo returns source info for the method, if present in the
// descriptor. Not all descriptors will contain source info. If non-nil, the
// returned info contains information about the location in the file where the
// method was defined and also contains comments associated with the method
// definition.
func (md *MethodDescriptor) GetSourceInfo() *dpb.SourceCodeInfo_Location {
	// look up by the path captured during resolve
	return md.file.sourceInfo.Get(md.sourceInfoPath)
}
+
// AsProto returns the underlying descriptor proto. Most usages will be more
// interested in AsMethodDescriptorProto, which has a concrete return type. This
// generic version is present to satisfy the Descriptor interface.
func (md *MethodDescriptor) AsProto() proto.Message {
	return md.proto // same value as AsMethodDescriptorProto
}
+
// AsMethodDescriptorProto returns the underlying descriptor proto, with its
// concrete type.
func (md *MethodDescriptor) AsMethodDescriptorProto() *dpb.MethodDescriptorProto {
	return md.proto
}
+
// String returns the underlying descriptor proto, in compact text format.
func (md *MethodDescriptor) String() string {
	return md.proto.String() // delegates to the proto's own String method
}
+
// IsServerStreaming returns true if this is a server-streaming method, per
// the underlying descriptor proto.
func (md *MethodDescriptor) IsServerStreaming() bool {
	return md.proto.GetServerStreaming()
}
+
// IsClientStreaming returns true if this is a client-streaming method, per
// the underlying descriptor proto.
func (md *MethodDescriptor) IsClientStreaming() bool {
	return md.proto.GetClientStreaming()
}
+
// GetInputType returns the input type, or request type, of the RPC method.
func (md *MethodDescriptor) GetInputType() *MessageDescriptor {
	return md.inType // populated during resolve
}
+
// GetOutputType returns the output type, or response type, of the RPC method.
func (md *MethodDescriptor) GetOutputType() *MessageDescriptor {
	return md.outType // populated during resolve
}
+
// OneOfDescriptor describes a one-of field set declared in a protocol buffer message.
type OneOfDescriptor struct {
	proto          *dpb.OneofDescriptorProto
	parent         *MessageDescriptor // the message in which the one-of is declared
	file           *FileDescriptor    // the file in which the one-of is declared
	choices        []*FieldDescriptor // the fields that belong to the one-of
	fqn            string             // fully-qualified name of the one-of
	sourceInfoPath []int32            // path into the file's source code info
}
+
+func createOneOfDescriptor(fd *FileDescriptor, parent *MessageDescriptor, index int, enclosing string, od *dpb.OneofDescriptorProto) (*OneOfDescriptor, string) {
+	oneOfName := merge(enclosing, od.GetName())
+	ret := &OneOfDescriptor{proto: od, parent: parent, file: fd, fqn: oneOfName}
+	for _, f := range parent.fields {
+		oi := f.proto.OneofIndex
+		if oi != nil && *oi == int32(index) {
+			f.oneOf = ret
+			ret.choices = append(ret.choices, f)
+		}
+	}
+	return ret, oneOfName
+}
+
// resolve records the one-of's source-code-info path.
func (od *OneOfDescriptor) resolve(path []int32) {
	od.sourceInfoPath = append([]int32(nil), path...) // defensive copy
}
+
// GetName returns the simple (unqualified) name of the one-of, as recorded in
// the underlying descriptor proto.
func (od *OneOfDescriptor) GetName() string {
	return od.proto.GetName()
}
+
// GetFullyQualifiedName returns the fully qualified name of the one-of. Unlike
// GetName, this includes the fully qualified name of the enclosing message.
func (od *OneOfDescriptor) GetFullyQualifiedName() string {
	return od.fqn // computed once when the descriptor was created
}
+
// GetParent returns the descriptor for the message in which this one-of is
// defined. Most usages will prefer to use GetOwner, which has a concrete
// return type. This more generic method is present to satisfy the Descriptor
// interface.
func (od *OneOfDescriptor) GetParent() Descriptor {
	return od.parent // same value as GetOwner
}
+
// GetOwner returns the message to which this one-of field set belongs.
func (od *OneOfDescriptor) GetOwner() *MessageDescriptor {
	return od.parent
}
+
// GetFile returns the descriptor for the file in which this one-of is defined.
func (od *OneOfDescriptor) GetFile() *FileDescriptor {
	return od.file // set when the descriptor was created
}
+
// GetOptions returns the one-of's options, straight from the underlying
// descriptor proto. Most usages will be more interested in GetOneOfOptions,
// which has a concrete return type. This generic version is present to
// satisfy the Descriptor interface.
func (od *OneOfDescriptor) GetOptions() proto.Message {
	return od.proto.GetOptions()
}
+
// GetOneOfOptions returns the one-of's options, straight from the underlying
// descriptor proto.
func (od *OneOfDescriptor) GetOneOfOptions() *dpb.OneofOptions {
	return od.proto.GetOptions()
}
+
// GetSourceInfo returns source info for the one-of, if present in the
// descriptor. Not all descriptors will contain source info. If non-nil, the
// returned info contains information about the location in the file where the
// one-of was defined and also contains comments associated with the one-of
// definition.
func (od *OneOfDescriptor) GetSourceInfo() *dpb.SourceCodeInfo_Location {
	// look up by the path captured during resolve
	return od.file.sourceInfo.Get(od.sourceInfoPath)
}
+
// AsProto returns the underlying descriptor proto. Most usages will be more
// interested in AsOneofDescriptorProto, which has a concrete return type. This
// generic version is present to satisfy the Descriptor interface.
func (od *OneOfDescriptor) AsProto() proto.Message {
	return od.proto // same value as AsOneofDescriptorProto
}
+
// AsOneofDescriptorProto returns the underlying descriptor proto, with its
// concrete type.
func (od *OneOfDescriptor) AsOneofDescriptorProto() *dpb.OneofDescriptorProto {
	return od.proto
}
+
// String returns the underlying descriptor proto, in compact text format.
func (od *OneOfDescriptor) String() string {
	return od.proto.String() // delegates to the proto's own String method
}
+
// GetChoices returns the fields that are part of the one-of field set. At most one of
// these fields may be set for a given message.
func (od *OneOfDescriptor) GetChoices() []*FieldDescriptor {
	return od.choices // linked up in createOneOfDescriptor
}
+
// scope represents a lexical scope in a proto file in which messages and enums
// can be declared. A scope maps a name to the descriptor it denotes within
// that scope, or nil if the name is not found there.
type scope func(string) Descriptor
+
+func fileScope(fd *FileDescriptor) scope {
+	// we search symbols in this file, but also symbols in other files that have
+	// the same package as this file or a "parent" package (in protobuf,
+	// packages are a hierarchy like C++ namespaces)
+	prefixes := internal.CreatePrefixList(fd.proto.GetPackage())
+	return func(name string) Descriptor {
+		for _, prefix := range prefixes {
+			n := merge(prefix, name)
+			d := findSymbol(fd, n, false)
+			if d != nil {
+				return d
+			}
+		}
+		return nil
+	}
+}
+
+func messageScope(md *MessageDescriptor) scope {
+	return func(name string) Descriptor {
+		n := merge(md.fqn, name)
+		if d, ok := md.file.symbols[n]; ok {
+			return d
+		}
+		return nil
+	}
+}
+
// resolve resolves the given symbol name, relative to the given file, into a
// descriptor. Names that begin with a '.' are fully-qualified and are looked
// up directly; other names are tried against each of the given lexical
// scopes, from innermost (last) to outermost (first). An error is returned if
// the name cannot be resolved.
func resolve(fd *FileDescriptor, name string, scopes []scope) (Descriptor, error) {
	if strings.HasPrefix(name, ".") {
		// already fully-qualified
		d := findSymbol(fd, name[1:], false)
		if d != nil {
			return d, nil
		}
	} else {
		// unqualified, so we look in the enclosing (last) scope first and move
		// towards outermost (first) scope, trying to resolve the symbol
		for i := len(scopes) - 1; i >= 0; i-- {
			d := scopes[i](name)
			if d != nil {
				return d, nil
			}
		}
	}
	return nil, fmt.Errorf("file %q included an unresolvable reference to %q", fd.proto.GetName(), name)
}
+
+func findSymbol(fd *FileDescriptor, name string, public bool) Descriptor {
+	d := fd.symbols[name]
+	if d != nil {
+		return d
+	}
+
+	// When public = false, we are searching only directly imported symbols. But we
+	// also need to search transitive public imports due to semantics of public imports.
+	var deps []*FileDescriptor
+	if public {
+		deps = fd.publicDeps
+	} else {
+		deps = fd.deps
+	}
+	for _, dep := range deps {
+		d = findSymbol(dep, name, true)
+		if d != nil {
+			return d
+		}
+	}
+
+	return nil
+}
+
// merge joins two name components with a '.', omitting the separator when the
// first component is empty.
func merge(a, b string) string {
	if a != "" {
		b = a + "." + b
	}
	return b
}
diff --git a/vendor/github.com/jhump/protoreflect/desc/descriptor_no_unsafe.go b/vendor/github.com/jhump/protoreflect/desc/descriptor_no_unsafe.go
new file mode 100644
index 0000000..d8e2df0
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/descriptor_no_unsafe.go
@@ -0,0 +1,30 @@
+//+build appengine
+// TODO: other build tags for environments where unsafe package is inappropriate
+
+package desc
+
+type jsonNameMap struct{}
+type memoizedDefault struct{}
+
+// FindFieldByJSONName finds the field with the given JSON field name. If no such
+// field exists then nil is returned. Only regular fields are returned, not
+// extensions.
+func (md *MessageDescriptor) FindFieldByJSONName(jsonName string) *FieldDescriptor {
+	// NB: With allowed use of unsafe, we use it to atomically define an index
+	// via atomic.LoadPointer/atomic.StorePointer. Without it, we skip the index
+	// and do an linear scan of fields each time.
+	for _, f := range md.fields {
+		jn := f.proto.GetJsonName()
+		if jn == "" {
+			jn = f.proto.GetName()
+		}
+		if jn == jsonName {
+			return f
+		}
+	}
+	return nil
+}
+
+// getDefaultValue returns the field's default value. In this build (no
+// unsafe), the default is recomputed on every call rather than memoized.
+func (fd *FieldDescriptor) getDefaultValue() interface{} {
+	return fd.determineDefault()
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/descriptor_unsafe.go b/vendor/github.com/jhump/protoreflect/desc/descriptor_unsafe.go
new file mode 100644
index 0000000..6ff872f
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/descriptor_unsafe.go
@@ -0,0 +1,59 @@
+//+build !appengine
+// TODO: exclude other build tags for environments where unsafe package is inappropriate
+
+package desc
+
+import (
+	"sync/atomic"
+	"unsafe"
+)
+
+type jsonNameMap map[string]*FieldDescriptor // loaded/stored atomically via atomic+unsafe
+type memoizedDefault *interface{}            // loaded/stored atomically via atomic+unsafe
+
+// FindFieldByJSONName finds the field with the given JSON field name. If no such
+// field exists then nil is returned. Only regular fields are returned, not
+// extensions.
+func (md *MessageDescriptor) FindFieldByJSONName(jsonName string) *FieldDescriptor {
+	// NB: We don't want to eagerly index JSON names because many programs won't use it.
+	// So we want to do it lazily, but also make sure the result is thread-safe. So we
+	// atomically load/store the map as if it were a normal pointer. We don't use other
+	// mechanisms -- like sync.Mutex, sync.RWMutex, sync.Once, or atomic.Value -- to
+	// do this lazily because those types cannot be copied, and we'd rather not induce
+	// 'go vet' errors in programs that use descriptors and try to copy them.
+	// If multiple goroutines try to access the index at the same time, before it is
+	// built, they will all end up computing the index redundantly. Future reads of
+	// the index will use whatever was the "last one stored" by those racing goroutines.
+	// Since building the index is deterministic, this is fine: all indices computed
+	// will be the same.
+	// View the md.jsonNames field as an unsafe.Pointer so it can be accessed atomically.
+	addrOfJsonNames := (*unsafe.Pointer)(unsafe.Pointer(&md.jsonNames))
+	jsonNames := atomic.LoadPointer(addrOfJsonNames)
+	var index map[string]*FieldDescriptor
+	if jsonNames == nil {
+		// slow path: compute the index
+		index = map[string]*FieldDescriptor{}
+		for _, f := range md.fields {
+			jn := f.proto.GetJsonName()
+			if jn == "" {
+				jn = f.proto.GetName()
+			}
+			index[jn] = f
+		}
+		// publish the freshly built index (last writer wins, per the NB above)
+		atomic.StorePointer(addrOfJsonNames, *(*unsafe.Pointer)(unsafe.Pointer(&index)))
+	} else {
+		// fast path: reinterpret the previously stored pointer as the map
+		*(*unsafe.Pointer)(unsafe.Pointer(&index)) = jsonNames
+	}
+	return index[jsonName]
+}
+
+// getDefaultValue returns the field's default value, memoizing the result
+// of the first computation in fd.def. The memoized value is published via
+// an atomic pointer store, so concurrent callers are safe; racing callers
+// may each compute the default, and the last store wins.
+func (fd *FieldDescriptor) getDefaultValue() interface{} {
+	// view fd.def as an unsafe.Pointer so it can be accessed atomically
+	addrOfDef := (*unsafe.Pointer)(unsafe.Pointer(&fd.def))
+	def := atomic.LoadPointer(addrOfDef)
+	if def != nil {
+		return *(*interface{})(def)
+	}
+	// slow path: compute the default, potentially involves decoding value
+	d := fd.determineDefault()
+	atomic.StorePointer(addrOfDef, (unsafe.Pointer(&d)))
+	return d
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/doc.go b/vendor/github.com/jhump/protoreflect/desc/doc.go
new file mode 100644
index 0000000..1740dce
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/doc.go
@@ -0,0 +1,41 @@
+// Package desc contains "rich descriptors" for protocol buffers. The built-in
+// descriptor types are simple protobuf messages, each one representing a
+// different kind of element in the AST of a .proto source file.
+//
+// Because of this inherent "tree" quality, these built-in descriptors cannot
+// refer to their enclosing file descriptor. Nor can a field descriptor refer to
+// a message or enum descriptor that represents the field's type (for enum and
+// nested message fields). All such links must instead be stringly typed. This
+// limitation makes them much harder to use for doing interesting things with
+// reflection.
+//
+// Without this package, resolving references to types is particularly complex.
+// For example, resolving a field's type, the message type an extension extends,
+// or the request and response types of an RPC method all require searching
+// through symbols defined not only in the file in which these elements are
+// declared but also in its transitive closure of dependencies.
+//
+// "Rich descriptors" avoid the need to deal with the complexities described
+// above. A rich descriptor has all type references resolved and provides
+// methods to access other rich descriptors for all referenced elements. Each
+// rich descriptor has a usefully broad API, but does not try to mimic the full
+// interface of the underlying descriptor proto. Instead, every rich descriptor
+// provides access to that underlying proto, for extracting descriptor
+// properties that are not immediately accessible through rich descriptor's
+// methods.
+//
+// Rich descriptors can be accessed in similar ways as their "poor" cousins
+// (descriptor protos). Instead of using proto.FileDescriptor, use
+// desc.LoadFileDescriptor. Message descriptors and extension field descriptors
+// can also be easily accessed using desc.LoadMessageDescriptor and
+// desc.LoadFieldDescriptorForExtension, respectively.
+//
+// It is also possible to create rich descriptors for proto messages that a given
+// Go program doesn't even know about. For example, they could be loaded from a
+// FileDescriptorSet file (which can be generated by protoc) or loaded from a
+// server. This enables interesting things like dynamic clients: where a Go
+// program can be an RPC client of a service it wasn't compiled to know about.
+//
+// Also see the grpcreflect, dynamic, and grpcdynamic packages in this same
+// repo to see just how useful rich descriptors really are.
+package desc
diff --git a/vendor/github.com/jhump/protoreflect/desc/imports.go b/vendor/github.com/jhump/protoreflect/desc/imports.go
new file mode 100644
index 0000000..caf3277
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/imports.go
@@ -0,0 +1,306 @@
+package desc
+
+import (
+	"fmt"
+	"path/filepath"
+	"reflect"
+	"strings"
+	"sync"
+
+	"github.com/golang/protobuf/proto"
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+)
+
+var (
+	globalImportPathConf map[string]string
+	globalImportPathMu   sync.RWMutex
+)
+
+// RegisterImportPath registers an alternate import path for a given registered
+// proto file path. For more details on why alternate import paths may need to
+// be configured, see ImportResolver.
+//
+// This method panics if provided invalid input. An empty importPath is invalid.
+// An un-registered registerPath is also invalid. For example, if an attempt is
+// made to register the import path "foo/bar.proto" as "bar.proto", but there is
+// no "bar.proto" registered in the Go protobuf runtime, this method will panic.
+// This method also panics if an attempt is made to register the same import
+// path more than once.
+//
+// This function works globally, applying to all descriptors loaded by this
+// package. If you instead want more granular support for handling alternate
+// import paths -- such as for a single invocation of a function in this
+// package or when the alternate path is only used from one file (so you don't
+// want the alternate path used when loading every other file), use an
+// ImportResolver instead.
+func RegisterImportPath(registerPath, importPath string) {
+	if len(importPath) == 0 {
+		panic("import path cannot be empty")
+	}
+	desc := proto.FileDescriptor(registerPath)
+	if len(desc) == 0 {
+		panic(fmt.Sprintf("path %q is not a registered proto file", registerPath))
+	}
+	globalImportPathMu.Lock()
+	defer globalImportPathMu.Unlock()
+	if reg := globalImportPathConf[importPath]; reg != "" {
+		panic(fmt.Sprintf("import path %q already registered for %s", importPath, reg))
+	}
+	if globalImportPathConf == nil {
+		globalImportPathConf = map[string]string{}
+	}
+	globalImportPathConf[importPath] = registerPath
+}
+
+// ResolveImport resolves the given import path. If it has been registered as an
+// alternate via RegisterImportPath, the registered path is returned. Otherwise,
+// the given import path is returned unchanged.
+func ResolveImport(importPath string) string {
+	importPath = clean(importPath)
+	globalImportPathMu.RLock()
+	defer globalImportPathMu.RUnlock()
+	reg := globalImportPathConf[importPath]
+	if reg == "" {
+		return importPath
+	}
+	return reg
+}
+
+// ImportResolver lets you work-around linking issues that are caused by
+// mismatches between how a particular proto source file is registered in the Go
+// protobuf runtime and how that same file is imported by other files. The file
+// is registered using the same relative path given to protoc when the file is
+// compiled (i.e. when Go code is generated). So if any file tries to import
+// that source file, but using a different relative path, then a link error will
+// occur when this package tries to load a descriptor for the importing file.
+//
+// For example, let's say we have two proto source files: "foo/bar.proto" and
+// "fubar/baz.proto". The latter imports the former using a line like so:
+//    import "foo/bar.proto";
+// However, when protoc is invoked, the command-line args look like so:
+//    protoc -Ifoo/ --go_out=foo/ bar.proto
+//    protoc -I./ -Ifubar/ --go_out=fubar/ baz.proto
+// Because the path given to protoc is just "bar.proto" and "baz.proto", this is
+// how they are registered in the Go protobuf runtime. So, when loading the
+// descriptor for "fubar/baz.proto", we'll see an import path of "foo/bar.proto"
+// but will find no file registered with that path:
+//    fd, err := desc.LoadFileDescriptor("baz.proto")
+//    // err will be non-nil, complaining that there is no such file
+//    // found named "foo/bar.proto"
+//
+// This can be remedied by registering alternate import paths using an
+// ImportResolver. Continuing with the example above, the code below would fix
+// any link issue:
+//    var r desc.ImportResolver
+//    r.RegisterImportPath("bar.proto", "foo/bar.proto")
+//    fd, err := r.LoadFileDescriptor("baz.proto")
+//    // err will be nil; descriptor successfully loaded!
+//
+// If there are files that are *always* imported using a different relative
+// path than how they are registered, consider using the global
+// RegisterImportPath function, so you don't have to use an ImportResolver for
+// every file that imports it.
+type ImportResolver struct {
+	// children maps the first path element of a source context to the
+	// resolver that holds rules for the remainder of that context,
+	// forming a trie keyed by path segments.
+	children    map[string]*ImportResolver
+	// importPaths maps an import path, as written in a proto file, to
+	// the path under which the imported file is actually registered.
+	importPaths map[string]string
+
+	// By default, an ImportResolver will fallback to consulting any paths
+	// registered via the top-level RegisterImportPath function. Setting this
+	// field to true will cause the ImportResolver to skip that fallback and
+	// only examine its own locally registered paths.
+	SkipFallbackRules bool
+}
+
+// ResolveImport resolves the given import path in the context of the given
+// source file. If a matching alternate has been registered with this resolver
+// via a call to RegisterImportPath or RegisterImportPathFrom, then the
+// registered path is returned. Otherwise, the given import path is returned
+// unchanged.
+func (r *ImportResolver) ResolveImport(source, importPath string) string {
+	if r != nil {
+		res := r.resolveImport(clean(source), clean(importPath))
+		if res != "" {
+			return res
+		}
+		if r.SkipFallbackRules {
+			return importPath
+		}
+	}
+	return ResolveImport(importPath)
+}
+
+// resolveImport looks up importPath in the context of the given source
+// path, preferring the most specific (deepest) matching source context in
+// the trie and falling back to this node's own context-free registrations.
+// It returns "" when no registration matches.
+func (r *ImportResolver) resolveImport(source, importPath string) string {
+	if source == "" {
+		// no source context left to match: use this node's own rules
+		return r.importPaths[importPath]
+	}
+	// split source into its first path element (car) and the rest (cdr)
+	// NOTE(review): this splits on filepath.Separator, while proto import
+	// paths conventionally use '/'; presumably inputs are normalized to
+	// the OS form by clean() — confirm behavior on Windows.
+	var car, cdr string
+	idx := strings.IndexRune(source, filepath.Separator)
+	if idx < 0 {
+		car, cdr = source, ""
+	} else {
+		car, cdr = source[:idx], source[idx+1:]
+	}
+	ch := r.children[car]
+	if ch != nil {
+		// a deeper context exists; its rules take precedence when present
+		if reg := ch.resolveImport(cdr, importPath); reg != "" {
+			return reg
+		}
+	}
+	return r.importPaths[importPath]
+}
+
+// RegisterImportPath registers an alternate import path for a given registered
+// proto file path with this resolver. Any appearance of the given import path
+// when linking files will instead try to link the given registered path. If the
+// registered path cannot be located, then linking will fallback to the actual
+// imported path.
+//
+// This method will panic if given an empty path or if the same import path is
+// registered more than once.
+//
+// To constrain the contexts where the given import path is to be re-written,
+// use RegisterImportPathFrom instead.
+func (r *ImportResolver) RegisterImportPath(registerPath, importPath string) {
+	// delegate with an empty source context, which applies to all files
+	r.RegisterImportPathFrom(registerPath, importPath, "")
+}
+
+// RegisterImportPathFrom registers an alternate import path for a given
+// registered proto file path with this resolver, but only for imports in the
+// specified source context.
+//
+// The source context can be the name of a folder or a proto source file. Any
+// appearance of the given import path in that context will instead try to link
+// the given registered path. To be in context, the file that is being linked
+// (i.e. the one whose import statement is being resolved) must be the same
+// relative path of the source context or be a sub-path (i.e. a descendant of
+// the source folder).
+//
+// If the registered path cannot be located, then linking will fallback to the
+// actual imported path.
+//
+// This method will panic if given an empty path. The source context, on the
+// other hand, is allowed to be blank. A blank source matches all files. This
+// method also panics if the same import path is registered in the same source
+// context more than once.
+func (r *ImportResolver) RegisterImportPathFrom(registerPath, importPath, source string) {
+	importPath = clean(importPath)
+	if len(importPath) == 0 {
+		panic("import path cannot be empty")
+	}
+	registerPath = clean(registerPath)
+	if len(registerPath) == 0 {
+		panic("registered path cannot be empty")
+	}
+	r.registerImportPathFrom(registerPath, importPath, clean(source))
+}
+
+// registerImportPathFrom inserts the mapping importPath -> registerPath
+// into the resolver trie at the node addressed by the source path,
+// creating intermediate child resolvers as needed. It panics if the same
+// import path is registered twice at the same node.
+func (r *ImportResolver) registerImportPathFrom(registerPath, importPath, source string) {
+	if source == "" {
+		// reached the target node: record the mapping here
+		if r.importPaths == nil {
+			r.importPaths = map[string]string{}
+		} else if reg := r.importPaths[importPath]; reg != "" {
+			panic(fmt.Sprintf("already registered import path %q as %q", importPath, registerPath))
+		}
+		r.importPaths[importPath] = registerPath
+		return
+	}
+	// split source into its first path element (car) and the rest (cdr)
+	var car, cdr string
+	idx := strings.IndexRune(source, filepath.Separator)
+	if idx < 0 {
+		car, cdr = source, ""
+	} else {
+		car, cdr = source[:idx], source[idx+1:]
+	}
+	ch := r.children[car]
+	if ch == nil {
+		if r.children == nil {
+			r.children = map[string]*ImportResolver{}
+		}
+		ch = &ImportResolver{}
+		r.children[car] = ch
+	}
+	ch.registerImportPathFrom(registerPath, importPath, cdr)
+}
+
+// NB: The methods below simply delegate to the package-level functions or
+// helpers of the same names, threading this resolver through so that its
+// registered alternate paths are consulted during linking.
+
+// LoadFileDescriptor is the same as the package function of the same name, but
+// any alternate paths configured in this resolver are used when linking the
+// given descriptor proto.
+func (r *ImportResolver) LoadFileDescriptor(filePath string) (*FileDescriptor, error) {
+	return loadFileDescriptor(filePath, r)
+}
+
+// LoadMessageDescriptor is the same as the package function of the same name,
+// but any alternate paths configured in this resolver are used when linking
+// files for the returned descriptor.
+func (r *ImportResolver) LoadMessageDescriptor(msgName string) (*MessageDescriptor, error) {
+	return loadMessageDescriptor(msgName, r)
+}
+
+// LoadMessageDescriptorForMessage is the same as the package function of the
+// same name, but any alternate paths configured in this resolver are used when
+// linking files for the returned descriptor.
+func (r *ImportResolver) LoadMessageDescriptorForMessage(msg proto.Message) (*MessageDescriptor, error) {
+	return loadMessageDescriptorForMessage(msg, r)
+}
+
+// LoadMessageDescriptorForType is the same as the package function of the same
+// name, but any alternate paths configured in this resolver are used when
+// linking files for the returned descriptor.
+func (r *ImportResolver) LoadMessageDescriptorForType(msgType reflect.Type) (*MessageDescriptor, error) {
+	return loadMessageDescriptorForType(msgType, r)
+}
+
+// LoadEnumDescriptorForEnum is the same as the package function of the same
+// name, but any alternate paths configured in this resolver are used when
+// linking files for the returned descriptor.
+func (r *ImportResolver) LoadEnumDescriptorForEnum(enum protoEnum) (*EnumDescriptor, error) {
+	return loadEnumDescriptorForEnum(enum, r)
+}
+
+// LoadEnumDescriptorForType is the same as the package function of the same
+// name, but any alternate paths configured in this resolver are used when
+// linking files for the returned descriptor.
+func (r *ImportResolver) LoadEnumDescriptorForType(enumType reflect.Type) (*EnumDescriptor, error) {
+	return loadEnumDescriptorForType(enumType, r)
+}
+
+// LoadFieldDescriptorForExtension is the same as the package function of the
+// same name, but any alternate paths configured in this resolver are used when
+// linking files for the returned descriptor.
+func (r *ImportResolver) LoadFieldDescriptorForExtension(ext *proto.ExtensionDesc) (*FieldDescriptor, error) {
+	return loadFieldDescriptorForExtension(ext, r)
+}
+
+// CreateFileDescriptor is the same as the package function of the same name,
+// but any alternate paths configured in this resolver are used when linking the
+// given descriptor proto.
+func (r *ImportResolver) CreateFileDescriptor(fdp *dpb.FileDescriptorProto, deps ...*FileDescriptor) (*FileDescriptor, error) {
+	return createFileDescriptor(fdp, deps, r)
+}
+
+// CreateFileDescriptors is the same as the package function of the same name,
+// but any alternate paths configured in this resolver are used when linking the
+// given descriptor protos.
+func (r *ImportResolver) CreateFileDescriptors(fds []*dpb.FileDescriptorProto) (map[string]*FileDescriptor, error) {
+	return createFileDescriptors(fds, r)
+}
+
+// CreateFileDescriptorFromSet is the same as the package function of the same
+// name, but any alternate paths configured in this resolver are used when
+// linking the descriptor protos in the given set.
+func (r *ImportResolver) CreateFileDescriptorFromSet(fds *dpb.FileDescriptorSet) (*FileDescriptor, error) {
+	return createFileDescriptorFromSet(fds, r)
+}
+
// dotPrefix is "./" spelled with the OS-specific path separator.
const dotPrefix = "." + string(filepath.Separator)

// clean normalizes the given path: it is run through filepath.Clean, the
// empty path and "." both become "", and any remaining leading "./" is
// stripped.
func clean(path string) string {
	if path == "" {
		return ""
	}
	if cleaned := filepath.Clean(path); cleaned != "." {
		return strings.TrimPrefix(cleaned, dotPrefix)
	}
	return ""
}
diff --git a/vendor/github.com/jhump/protoreflect/desc/internal/source_info.go b/vendor/github.com/jhump/protoreflect/desc/internal/source_info.go
new file mode 100644
index 0000000..4d7dbae
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/internal/source_info.go
@@ -0,0 +1,96 @@
+package internal
+
+import (
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+)
+
+// SourceInfoMap is a map of paths in a descriptor to the corresponding source
+// code info. Paths are encoded to strings via asMapKey so that they can be
+// used as map keys.
+type SourceInfoMap map[string]*dpb.SourceCodeInfo_Location
+
+// Get returns the source code info for the given path, or nil if the path
+// has no recorded info.
+func (m SourceInfoMap) Get(path []int32) *dpb.SourceCodeInfo_Location {
+	return m[asMapKey(path)]
+}
+
+// Put stores the given source code info for the given path, overwriting any
+// previous entry for that path.
+func (m SourceInfoMap) Put(path []int32, loc *dpb.SourceCodeInfo_Location) {
+	m[asMapKey(path)] = loc
+}
+
+// PutIfAbsent stores the given source code info for the given path only if the
+// given path does not exist in the map. This method returns true when the value
+// is stored, false if the path already exists.
+func (m SourceInfoMap) PutIfAbsent(path []int32, loc *dpb.SourceCodeInfo_Location) bool {
+	k := asMapKey(path)
+	if _, ok := m[k]; ok {
+		return false
+	}
+	m[k] = loc
+	return true
+}
+
// asMapKey encodes a path (slice of int32) as a string so that it can be
// used as a map key. Each element is serialized as 4 little-endian bytes.
func asMapKey(slice []int32) string {
	// NB: arrays should be usable as map keys, but this does not
	// work due to a bug: https://github.com/golang/go/issues/22605
	b := make([]byte, 0, len(slice)*4)
	for _, s := range slice {
		b = append(b, byte(s), byte(s>>8), byte(s>>16), byte(s>>24))
	}
	return string(b)
}
+
+// CreateSourceInfoMap constructs a new SourceInfoMap and populates it with the
+// source code info in the given file descriptor proto.
+func CreateSourceInfoMap(fd *dpb.FileDescriptorProto) SourceInfoMap {
+	res := SourceInfoMap{}
+	PopulateSourceInfoMap(fd, res)
+	return res
+}
+
+// PopulateSourceInfoMap populates the given SourceInfoMap with information from
+// the given file descriptor. If multiple locations share the same path, the
+// last one seen wins (Put overwrites).
+func PopulateSourceInfoMap(fd *dpb.FileDescriptorProto, m SourceInfoMap) {
+	for _, l := range fd.GetSourceCodeInfo().GetLocation() {
+		m.Put(l.Path, l)
+	}
+}
+
+// NB: This wonkiness allows desc.Descriptor impl to implement an interface that
+// is only usable from this package, by embedding a SourceInfoComputeFunc that
+// implements the actual logic (which must live in desc package to avoid a
+// dependency cycle).
+
+// SourceInfoComputer is a single method which will be invoked to recompute
+// source info. This is needed for the protoparse package, which needs to link
+// descriptors without source info in order to interpret options, but then needs
+// to re-compute source info after that interpretation so that final linked
+// descriptors expose the right info.
+type SourceInfoComputer interface {
+	recomputeSourceInfo()
+}
+
+// SourceInfoComputeFunc is the type that a desc.Descriptor will embed. It will
+// be aliased in the desc package to an unexported name so it is not marked as
+// an exported field in reflection and not present in Go docs.
+type SourceInfoComputeFunc func()
+
+// recomputeSourceInfo satisfies SourceInfoComputer by invoking the function.
+func (f SourceInfoComputeFunc) recomputeSourceInfo() {
+	f()
+}
+
+// RecomputeSourceInfo is used to initiate recomputation of source info. This
+// is used by the protoparse package, after it interprets options.
+func RecomputeSourceInfo(c SourceInfoComputer) {
+	c.recomputeSourceInfo()
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/internal/util.go b/vendor/github.com/jhump/protoreflect/desc/internal/util.go
new file mode 100644
index 0000000..d5197f1
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/internal/util.go
@@ -0,0 +1,267 @@
+package internal
+
+import (
+	"unicode"
+	"unicode/utf8"
+)
+
+const (
+	// MaxTag is the maximum allowed tag number for a field.
+	MaxTag = 536870911 // 2^29 - 1
+
+	// SpecialReservedStart is the first tag in a range that is reserved and not
+	// allowed for use in message definitions.
+	SpecialReservedStart = 19000
+	// SpecialReservedEnd is the last tag in a range that is reserved and not
+	// allowed for use in message definitions.
+	SpecialReservedEnd = 19999
+
+	// NB: It would be nice to use constants from generated code instead of
+	// hard-coding these here. But code-gen does not emit these as constants
+	// anywhere. The only places they appear in generated code are struct tags
+	// on fields of the generated descriptor protos.
+
+	// File_packageTag is the tag number of the package element in a file
+	// descriptor proto.
+	File_packageTag = 2
+	// File_dependencyTag is the tag number of the dependencies element in a
+	// file descriptor proto.
+	File_dependencyTag = 3
+	// File_messagesTag is the tag number of the messages element in a file
+	// descriptor proto.
+	File_messagesTag = 4
+	// File_enumsTag is the tag number of the enums element in a file descriptor
+	// proto.
+	File_enumsTag = 5
+	// File_servicesTag is the tag number of the services element in a file
+	// descriptor proto.
+	File_servicesTag = 6
+	// File_extensionsTag is the tag number of the extensions element in a file
+	// descriptor proto.
+	File_extensionsTag = 7
+	// File_optionsTag is the tag number of the options element in a file
+	// descriptor proto.
+	File_optionsTag = 8
+	// File_syntaxTag is the tag number of the syntax element in a file
+	// descriptor proto.
+	File_syntaxTag = 12
+	// Message_nameTag is the tag number of the name element in a message
+	// descriptor proto.
+	Message_nameTag = 1
+	// Message_fieldsTag is the tag number of the fields element in a message
+	// descriptor proto.
+	Message_fieldsTag = 2
+	// Message_nestedMessagesTag is the tag number of the nested messages
+	// element in a message descriptor proto.
+	Message_nestedMessagesTag = 3
+	// Message_enumsTag is the tag number of the enums element in a message
+	// descriptor proto.
+	Message_enumsTag = 4
+	// Message_extensionRangeTag is the tag number of the extension ranges
+	// element in a message descriptor proto.
+	Message_extensionRangeTag = 5
+	// Message_extensionsTag is the tag number of the extensions element in a
+	// message descriptor proto.
+	Message_extensionsTag = 6
+	// Message_optionsTag is the tag number of the options element in a message
+	// descriptor proto.
+	Message_optionsTag = 7
+	// Message_oneOfsTag is the tag number of the one-ofs element in a message
+	// descriptor proto.
+	Message_oneOfsTag = 8
+	// Message_reservedRangeTag is the tag number of the reserved ranges element
+	// in a message descriptor proto.
+	Message_reservedRangeTag = 9
+	// Message_reservedNameTag is the tag number of the reserved names element
+	// in a message descriptor proto.
+	Message_reservedNameTag = 10
+	// ExtensionRange_startTag is the tag number of the start index in an
+	// extension range proto.
+	ExtensionRange_startTag = 1
+	// ExtensionRange_endTag is the tag number of the end index in an
+	// extension range proto.
+	ExtensionRange_endTag = 2
+	// ExtensionRange_optionsTag is the tag number of the options element in an
+	// extension range proto.
+	ExtensionRange_optionsTag = 3
+	// ReservedRange_startTag is the tag number of the start index in a reserved
+	// range proto.
+	ReservedRange_startTag = 1
+	// ReservedRange_endTag is the tag number of the end index in a reserved
+	// range proto.
+	ReservedRange_endTag = 2
+	// Field_nameTag is the tag number of the name element in a field descriptor
+	// proto.
+	Field_nameTag = 1
+	// Field_extendeeTag is the tag number of the extendee element in a field
+	// descriptor proto.
+	Field_extendeeTag = 2
+	// Field_numberTag is the tag number of the number element in a field
+	// descriptor proto.
+	Field_numberTag = 3
+	// Field_labelTag is the tag number of the label element in a field
+	// descriptor proto.
+	Field_labelTag = 4
+	// Field_typeTag is the tag number of the type element in a field descriptor
+	// proto.
+	Field_typeTag = 5
+	// Field_defaultTag is the tag number of the default value element in a
+	// field descriptor proto.
+	Field_defaultTag = 7
+	// Field_optionsTag is the tag number of the options element in a field
+	// descriptor proto.
+	Field_optionsTag = 8
+	// Field_jsonNameTag is the tag number of the JSON name element in a field
+	// descriptor proto.
+	Field_jsonNameTag = 10
+	// OneOf_nameTag is the tag number of the name element in a one-of
+	// descriptor proto.
+	OneOf_nameTag = 1
+	// OneOf_optionsTag is the tag number of the options element in a one-of
+	// descriptor proto.
+	OneOf_optionsTag = 2
+	// Enum_nameTag is the tag number of the name element in an enum descriptor
+	// proto.
+	Enum_nameTag = 1
+	// Enum_valuesTag is the tag number of the values element in an enum
+	// descriptor proto.
+	Enum_valuesTag = 2
+	// Enum_optionsTag is the tag number of the options element in an enum
+	// descriptor proto.
+	Enum_optionsTag = 3
+	// Enum_reservedRangeTag is the tag number of the reserved ranges element in
+	// an enum descriptor proto.
+	Enum_reservedRangeTag = 4
+	// Enum_reservedNameTag is the tag number of the reserved names element in
+	// an enum descriptor proto.
+	Enum_reservedNameTag = 5
+	// EnumVal_nameTag is the tag number of the name element in an enum value
+	// descriptor proto.
+	EnumVal_nameTag = 1
+	// EnumVal_numberTag is the tag number of the number element in an enum
+	// value descriptor proto.
+	EnumVal_numberTag = 2
+	// EnumVal_optionsTag is the tag number of the options element in an enum
+	// value descriptor proto.
+	EnumVal_optionsTag = 3
+	// Service_nameTag is the tag number of the name element in a service
+	// descriptor proto.
+	Service_nameTag = 1
+	// Service_methodsTag is the tag number of the methods element in a service
+	// descriptor proto.
+	Service_methodsTag = 2
+	// Service_optionsTag is the tag number of the options element in a service
+	// descriptor proto.
+	Service_optionsTag = 3
+	// Method_nameTag is the tag number of the name element in a method
+	// descriptor proto.
+	Method_nameTag = 1
+	// Method_inputTag is the tag number of the input type element in a method
+	// descriptor proto.
+	Method_inputTag = 2
+	// Method_outputTag is the tag number of the output type element in a method
+	// descriptor proto.
+	Method_outputTag = 3
+	// Method_optionsTag is the tag number of the options element in a method
+	// descriptor proto.
+	Method_optionsTag = 4
+	// Method_inputStreamTag is the tag number of the input stream flag in a
+	// method descriptor proto.
+	Method_inputStreamTag = 5
+	// Method_outputStreamTag is the tag number of the output stream flag in a
+	// method descriptor proto.
+	Method_outputStreamTag = 6
+
+	// UninterpretedOptionsTag is the tag number of the uninterpreted options
+	// element. All *Options messages use the same tag for the field that stores
+	// uninterpreted options.
+	UninterpretedOptionsTag = 999
+
+	// Uninterpreted_nameTag is the tag number of the name element in an
+	// uninterpreted options proto.
+	Uninterpreted_nameTag = 2
+	// Uninterpreted_identTag is the tag number of the identifier value in an
+	// uninterpreted options proto.
+	Uninterpreted_identTag = 3
+	// Uninterpreted_posIntTag is the tag number of the positive int value in an
+	// uninterpreted options proto.
+	Uninterpreted_posIntTag = 4
+	// Uninterpreted_negIntTag is the tag number of the negative int value in an
+	// uninterpreted options proto.
+	Uninterpreted_negIntTag = 5
+	// Uninterpreted_doubleTag is the tag number of the double value in an
+	// uninterpreted options proto.
+	Uninterpreted_doubleTag = 6
+	// Uninterpreted_stringTag is the tag number of the string value in an
+	// uninterpreted options proto.
+	Uninterpreted_stringTag = 7
+	// Uninterpreted_aggregateTag is the tag number of the aggregate value in an
+	// uninterpreted options proto.
+	Uninterpreted_aggregateTag = 8
+	// UninterpretedName_nameTag is the tag number of the name element in an
+	// uninterpreted option name proto.
+	UninterpretedName_nameTag = 1
+)
+
// JsonName returns the default JSON name for a field with the given
// (snake_case) name: each underscore is dropped and the letter following
// it is upper-cased, e.g. "foo_bar" becomes "fooBar".
func JsonName(name string) string {
	js := make([]rune, 0, len(name))
	upperNext := false
	for i, r := range name {
		switch {
		case r == '_':
			// drop the underscore; capitalize whatever follows
			upperNext = true
		case i == 0:
			js = append(js, r)
		case upperNext:
			upperNext = false
			js = append(js, unicode.ToUpper(r))
		default:
			js = append(js, r)
		}
	}
	return string(js)
}
+
// InitCap returns the given field name, but with the first letter
// capitalized. An empty name is returned unchanged (without this guard,
// utf8.DecodeRuneInString would yield utf8.RuneError for "" and the
// function would return the U+FFFD replacement character).
func InitCap(name string) string {
	if name == "" {
		return ""
	}
	r, sz := utf8.DecodeRuneInString(name)
	return string(unicode.ToUpper(r)) + name[sz:]
}
+
// CreatePrefixList returns a list of package prefixes to search when
// resolving a symbol name, ordered from most specific to least: the full
// package, each successively shorter dotted prefix, and finally the empty
// string (the unqualified/root scope). For a blank package only the empty
// string is returned. For example, for a package named "foo.bar.baz" it
// returns:
//   ["foo.bar.baz", "foo.bar", "foo", ""]
func CreatePrefixList(pkg string) []string {
	if pkg == "" {
		return []string{""}
	}
	// walk backwards so each dot encountered yields the next-shorter prefix
	prefixes := []string{pkg}
	for i := len(pkg) - 1; i >= 0; i-- {
		if pkg[i] == '.' {
			prefixes = append(prefixes, pkg[:i])
		}
	}
	return append(prefixes, "")
}
diff --git a/vendor/github.com/jhump/protoreflect/desc/load.go b/vendor/github.com/jhump/protoreflect/desc/load.go
new file mode 100644
index 0000000..4a05830
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/load.go
@@ -0,0 +1,341 @@
+package desc
+
+import (
+	"fmt"
+	"reflect"
+	"sync"
+
+	"github.com/golang/protobuf/proto"
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+
+	"github.com/jhump/protoreflect/internal"
+)
+
+var (
+	// cacheMu guards all three caches below.
+	cacheMu       sync.RWMutex
+	// filesCache maps file names to their loaded "rich" descriptors.
+	filesCache    = map[string]*FileDescriptor{}
+	// messagesCache maps fully-qualified message names to their descriptors.
+	messagesCache = map[string]*MessageDescriptor{}
+	// enumCache maps generated enum types (non-pointer) to their descriptors;
+	// see the NB comment further below for why enums are keyed by type.
+	enumCache     = map[reflect.Type]*EnumDescriptor{}
+)
+
+// LoadFileDescriptor creates a file descriptor using the bytes returned by
+// proto.FileDescriptor. Descriptors are cached so that they do not need to be
+// re-processed if the same file is fetched again later.
+func LoadFileDescriptor(file string) (*FileDescriptor, error) {
+	return loadFileDescriptor(file, nil)
+}
+
+// loadFileDescriptor first consults the cache under a read lock; on a miss it
+// takes the write lock and loads (re-checking the cache) via the locked path.
+func loadFileDescriptor(file string, r *ImportResolver) (*FileDescriptor, error) {
+	f := getFileFromCache(file)
+	if f != nil {
+		return f, nil
+	}
+	cacheMu.Lock()
+	defer cacheMu.Unlock()
+	return loadFileDescriptorLocked(file, r)
+}
+
+// loadFileDescriptorLocked loads and caches the named file's descriptor.
+// The caller must hold the cacheMu write lock.
+func loadFileDescriptorLocked(file string, r *ImportResolver) (*FileDescriptor, error) {
+	// re-check the cache: another goroutine may have loaded the file while
+	// we were waiting for the write lock
+	f := filesCache[file]
+	if f != nil {
+		return f, nil
+	}
+	fd, err := internal.LoadFileDescriptor(file)
+	if err != nil {
+		return nil, err
+	}
+
+	f, err = toFileDescriptorLocked(fd, r)
+	if err != nil {
+		return nil, err
+	}
+	putCacheLocked(file, f)
+	return f, nil
+}
+
+// toFileDescriptorLocked converts a descriptor proto into a rich descriptor,
+// first recursively loading all of its dependencies. The caller must hold the
+// cacheMu write lock.
+func toFileDescriptorLocked(fd *dpb.FileDescriptorProto, r *ImportResolver) (*FileDescriptor, error) {
+	deps := make([]*FileDescriptor, len(fd.GetDependency()))
+	for i, dep := range fd.GetDependency() {
+		resolvedDep := r.ResolveImport(fd.GetName(), dep)
+		var err error
+		deps[i], err = loadFileDescriptorLocked(resolvedDep, r)
+		if _, ok := err.(internal.ErrNoSuchFile); ok && resolvedDep != dep {
+			// try original path
+			deps[i], err = loadFileDescriptorLocked(dep, r)
+		}
+		if err != nil {
+			return nil, err
+		}
+	}
+	return CreateFileDescriptor(fd, deps...)
+}
+
+// getFileFromCache returns the cached descriptor for the named file, or nil.
+func getFileFromCache(file string) *FileDescriptor {
+	cacheMu.RLock()
+	defer cacheMu.RUnlock()
+	return filesCache[file]
+}
+
+// putCacheLocked stores a file descriptor and all messages it contains.
+// The caller must hold the cacheMu write lock.
+func putCacheLocked(filename string, fd *FileDescriptor) {
+	filesCache[filename] = fd
+	putMessageCacheLocked(fd.messages)
+}
+
+// putMessageCacheLocked recursively caches the given messages and all of
+// their nested messages by fully-qualified name.
+func putMessageCacheLocked(mds []*MessageDescriptor) {
+	for _, md := range mds {
+		messagesCache[md.fqn] = md
+		putMessageCacheLocked(md.nested)
+	}
+}
+
+// interface implemented by generated messages, which all have a Descriptor() method in
+// addition to the methods of proto.Message
+type protoMessage interface {
+	proto.Message
+	Descriptor() ([]byte, []int)
+}
+
+// LoadMessageDescriptor loads descriptor using the encoded descriptor proto returned by
+// Message.Descriptor() for the given message type. If the given type is not recognized,
+// then a nil descriptor is returned.
+func LoadMessageDescriptor(message string) (*MessageDescriptor, error) {
+	return loadMessageDescriptor(message, nil)
+}
+
+// loadMessageDescriptor resolves a fully-qualified message name via the proto
+// registry and loads its descriptor, consulting the cache first. A nil, nil
+// return means the name is not registered.
+func loadMessageDescriptor(message string, r *ImportResolver) (*MessageDescriptor, error) {
+	m := getMessageFromCache(message)
+	if m != nil {
+		return m, nil
+	}
+
+	pt := proto.MessageType(message)
+	if pt == nil {
+		// unknown message name: by contract, not an error
+		return nil, nil
+	}
+	msg, err := messageFromType(pt)
+	if err != nil {
+		return nil, err
+	}
+
+	cacheMu.Lock()
+	defer cacheMu.Unlock()
+	return loadMessageDescriptorForTypeLocked(message, msg, r)
+}
+
+// LoadMessageDescriptorForType loads descriptor using the encoded descriptor proto returned
+// by message.Descriptor() for the given message type. If the given type is not recognized,
+// then a nil descriptor is returned.
+func LoadMessageDescriptorForType(messageType reflect.Type) (*MessageDescriptor, error) {
+	return loadMessageDescriptorForType(messageType, nil)
+}
+
+// loadMessageDescriptorForType instantiates a zero value of the given type
+// and delegates to the message-based loader.
+func loadMessageDescriptorForType(messageType reflect.Type, r *ImportResolver) (*MessageDescriptor, error) {
+	m, err := messageFromType(messageType)
+	if err != nil {
+		return nil, err
+	}
+	return loadMessageDescriptorForMessage(m, r)
+}
+
+// LoadMessageDescriptorForMessage loads descriptor using the encoded descriptor proto
+// returned by message.Descriptor(). If the given type is not recognized, then a nil
+// descriptor is returned.
+func LoadMessageDescriptorForMessage(message proto.Message) (*MessageDescriptor, error) {
+	return loadMessageDescriptorForMessage(message, nil)
+}
+
+// loadMessageDescriptorForMessage loads the descriptor for the given message,
+// consulting the cache first. Dynamic messages that already carry a rich
+// descriptor are answered without touching the cache. Returns nil, nil if the
+// message's name is not registered.
+func loadMessageDescriptorForMessage(message proto.Message, r *ImportResolver) (*MessageDescriptor, error) {
+	// efficiently handle dynamic messages
+	type descriptorable interface {
+		GetMessageDescriptor() *MessageDescriptor
+	}
+	if d, ok := message.(descriptorable); ok {
+		return d.GetMessageDescriptor(), nil
+	}
+
+	name := proto.MessageName(message)
+	if name == "" {
+		return nil, nil
+	}
+	m := getMessageFromCache(name)
+	if m != nil {
+		return m, nil
+	}
+
+	cacheMu.Lock()
+	defer cacheMu.Unlock()
+	// pass the caller-supplied import resolver through; this previously
+	// passed a literal nil, silently ignoring any custom resolver
+	return loadMessageDescriptorForTypeLocked(name, message.(protoMessage), r)
+}
+
+// messageFromType returns a zero value of the given type (taking its pointer
+// type if needed) as a protoMessage, or an error if the type is not a
+// generated proto message.
+func messageFromType(mt reflect.Type) (protoMessage, error) {
+	if mt.Kind() != reflect.Ptr {
+		// generated messages implement proto.Message on the pointer type
+		mt = reflect.PtrTo(mt)
+	}
+	m, ok := reflect.Zero(mt).Interface().(protoMessage)
+	if !ok {
+		return nil, fmt.Errorf("failed to create message from type: %v", mt)
+	}
+	return m, nil
+}
+
+// loadMessageDescriptorForTypeLocked decodes the message's file descriptor,
+// converts it to a rich descriptor, caches it, and returns the descriptor for
+// the named message. The caller must hold the cacheMu write lock.
+// NOTE(review): if the decoded file does not actually contain the named
+// symbol, the trailing type assertion would panic — presumably the generated
+// descriptor always contains it; verify if hardening is needed.
+func loadMessageDescriptorForTypeLocked(name string, message protoMessage, r *ImportResolver) (*MessageDescriptor, error) {
+	m := messagesCache[name]
+	if m != nil {
+		return m, nil
+	}
+
+	fdb, _ := message.Descriptor()
+	fd, err := internal.DecodeFileDescriptor(name, fdb)
+	if err != nil {
+		return nil, err
+	}
+
+	f, err := toFileDescriptorLocked(fd, r)
+	if err != nil {
+		return nil, err
+	}
+	putCacheLocked(fd.GetName(), f)
+	return f.FindSymbol(name).(*MessageDescriptor), nil
+}
+
+// getMessageFromCache returns the cached descriptor for the named message, or nil.
+func getMessageFromCache(message string) *MessageDescriptor {
+	cacheMu.RLock()
+	defer cacheMu.RUnlock()
+	return messagesCache[message]
+}
+
+// interface implemented by all generated enums; EnumDescriptor returns the
+// gzipped file descriptor bytes plus the index path to the enum within it
+type protoEnum interface {
+	EnumDescriptor() ([]byte, []int)
+}
+
+// NB: There is no LoadEnumDescriptor that takes a fully-qualified enum name because
+// it is not useful since protoc-gen-go does not expose the name anywhere in generated
+// code or register it in a way that it is accessible for reflection code. This also
+// means we have to cache enum descriptors differently -- we can only cache them as
+// they are requested, as opposed to caching all enum types whenever a file descriptor
+// is cached. This is because we need to know the generated type of the enums, and we
+// don't know that at the time of caching file descriptors.
+
+// LoadEnumDescriptorForType loads descriptor using the encoded descriptor proto returned
+// by enum.EnumDescriptor() for the given enum type.
+func LoadEnumDescriptorForType(enumType reflect.Type) (*EnumDescriptor, error) {
+	return loadEnumDescriptorForType(enumType, nil)
+}
+
+// loadEnumDescriptorForType normalizes the type, consults the cache, and on a
+// miss instantiates a zero enum value to decode its descriptor.
+func loadEnumDescriptorForType(enumType reflect.Type, r *ImportResolver) (*EnumDescriptor, error) {
+	// we cache descriptors using non-pointer type
+	if enumType.Kind() == reflect.Ptr {
+		enumType = enumType.Elem()
+	}
+	e := getEnumFromCache(enumType)
+	if e != nil {
+		return e, nil
+	}
+	enum, err := enumFromType(enumType)
+	if err != nil {
+		return nil, err
+	}
+
+	cacheMu.Lock()
+	defer cacheMu.Unlock()
+	return loadEnumDescriptorForTypeLocked(enumType, enum, r)
+}
+
+// LoadEnumDescriptorForEnum loads descriptor using the encoded descriptor proto
+// returned by enum.EnumDescriptor().
+func LoadEnumDescriptorForEnum(enum protoEnum) (*EnumDescriptor, error) {
+	return loadEnumDescriptorForEnum(enum, nil)
+}
+
+// loadEnumDescriptorForEnum is like loadEnumDescriptorForType but starts from
+// an enum value rather than a reflect.Type.
+func loadEnumDescriptorForEnum(enum protoEnum, r *ImportResolver) (*EnumDescriptor, error) {
+	et := reflect.TypeOf(enum)
+	// we cache descriptors using non-pointer type
+	if et.Kind() == reflect.Ptr {
+		et = et.Elem()
+		enum = reflect.Zero(et).Interface().(protoEnum)
+	}
+	e := getEnumFromCache(et)
+	if e != nil {
+		return e, nil
+	}
+
+	cacheMu.Lock()
+	defer cacheMu.Unlock()
+	return loadEnumDescriptorForTypeLocked(et, enum, r)
+}
+
+// enumFromType returns a zero value of the given type as a protoEnum.
+// Generated enums are int32 and define EnumDescriptor on the value type;
+// anything else is tried via its pointer type.
+func enumFromType(et reflect.Type) (protoEnum, error) {
+	if et.Kind() != reflect.Int32 {
+		et = reflect.PtrTo(et)
+	}
+	e, ok := reflect.Zero(et).Interface().(protoEnum)
+	if !ok {
+		return nil, fmt.Errorf("failed to create enum from type: %v", et)
+	}
+	return e, nil
+}
+
+// loadEnumDescriptorForTypeLocked decodes the enum's file descriptor (reusing
+// an already-cached rich file descriptor when available), locates the enum by
+// its index path, and caches it by type. The caller must hold the cacheMu
+// write lock.
+func loadEnumDescriptorForTypeLocked(et reflect.Type, enum protoEnum, r *ImportResolver) (*EnumDescriptor, error) {
+	e := enumCache[et]
+	if e != nil {
+		return e, nil
+	}
+
+	fdb, path := enum.EnumDescriptor()
+	// the decode step only needs a label for error messages; use the type name
+	name := fmt.Sprintf("%v", et)
+	fd, err := internal.DecodeFileDescriptor(name, fdb)
+	if err != nil {
+		return nil, err
+	}
+	// see if we already have cached "rich" descriptor
+	f, ok := filesCache[fd.GetName()]
+	if !ok {
+		f, err = toFileDescriptorLocked(fd, r)
+		if err != nil {
+			return nil, err
+		}
+		putCacheLocked(fd.GetName(), f)
+	}
+
+	ed := findEnum(f, path)
+	enumCache[et] = ed
+	return ed, nil
+}
+
+// getEnumFromCache returns the cached descriptor for the given enum type, or nil.
+func getEnumFromCache(et reflect.Type) *EnumDescriptor {
+	cacheMu.RLock()
+	defer cacheMu.RUnlock()
+	return enumCache[et]
+}
+
+// findEnum walks the index path from enum.EnumDescriptor(): the first element
+// indexes a top-level declaration in the file, intermediate elements index
+// nested messages, and the last element indexes the enum itself.
+func findEnum(fd *FileDescriptor, path []int) *EnumDescriptor {
+	if len(path) == 1 {
+		// top-level enum
+		return fd.GetEnumTypes()[path[0]]
+	}
+	md := fd.GetMessageTypes()[path[0]]
+	for _, i := range path[1 : len(path)-1] {
+		md = md.GetNestedMessageTypes()[i]
+	}
+	return md.GetNestedEnumTypes()[path[len(path)-1]]
+}
+
+// LoadFieldDescriptorForExtension loads the field descriptor that corresponds to the given
+// extension description.
+func LoadFieldDescriptorForExtension(ext *proto.ExtensionDesc) (*FieldDescriptor, error) {
+	return loadFieldDescriptorForExtension(ext, nil)
+}
+
+// loadFieldDescriptorForExtension loads the extension's defining file and then
+// cross-checks that the found symbol really matches the ExtensionDesc.
+func loadFieldDescriptorForExtension(ext *proto.ExtensionDesc, r *ImportResolver) (*FieldDescriptor, error) {
+	file, err := loadFileDescriptor(ext.Filename, r)
+	if err != nil {
+		return nil, err
+	}
+	field, ok := file.FindSymbol(ext.Name).(*FieldDescriptor)
+	// make sure descriptor agrees with attributes of the ExtensionDesc
+	if !ok || !field.IsExtension() || field.GetOwner().GetFullyQualifiedName() != proto.MessageName(ext.ExtendedType) ||
+		field.GetNumber() != ext.Field {
+		return nil, fmt.Errorf("file descriptor contained unexpected object with name %s", ext.Name)
+	}
+	return field, nil
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/.gitignore b/vendor/github.com/jhump/protoreflect/desc/protoparse/.gitignore
new file mode 100644
index 0000000..2652053
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/.gitignore
@@ -0,0 +1 @@
+y.output
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast.go
new file mode 100644
index 0000000..2499917
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast.go
@@ -0,0 +1,1081 @@
+package protoparse
+
+import "fmt"
+
+// This file defines all of the nodes in the proto AST.
+
+// ErrorWithSourcePos is an error about a proto source file that includes
+// information about the location in the file that caused the error.
+type ErrorWithSourcePos struct {
+	Underlying error
+	Pos        *SourcePos
+}
+
+// Error implements the error interface. It tolerates a nil or incomplete
+// position: with no Pos at all only the underlying error is reported, and
+// with no valid line/column only the filename is prepended.
+func (e ErrorWithSourcePos) Error() string {
+	if e.Pos == nil {
+		// guard against zero-value errors; previously this dereferenced
+		// e.Pos unconditionally and would panic
+		return fmt.Sprintf("%v", e.Underlying)
+	}
+	if e.Pos.Line <= 0 || e.Pos.Col <= 0 {
+		return fmt.Sprintf("%s: %v", e.Pos.Filename, e.Underlying)
+	}
+	return fmt.Sprintf("%s:%d:%d: %v", e.Pos.Filename, e.Pos.Line, e.Pos.Col, e.Underlying)
+}
+
+// SourcePos identifies a location in a proto source file.
+type SourcePos struct {
+	Filename  string
+	Line, Col int
+	Offset    int
+}
+
+// unknownPos returns a position that carries only a filename, for errors that
+// cannot be attributed to a particular line and column.
+func unknownPos(filename string) *SourcePos {
+	return &SourcePos{Filename: filename}
+}
+
+// node is implemented by every AST node; it exposes the node's source span
+// and any comments attached before/after it.
+type node interface {
+	start() *SourcePos
+	end() *SourcePos
+	leadingComments() []*comment
+	trailingComments() []*comment
+}
+
+// terminalNode is a leaf token node whose comments can be re-attached during
+// comment assignment.
+type terminalNode interface {
+	node
+	popLeadingComment() *comment
+	pushTrailingComment(*comment)
+}
+
+var _ terminalNode = (*basicNode)(nil)
+var _ terminalNode = (*stringLiteralNode)(nil)
+var _ terminalNode = (*intLiteralNode)(nil)
+var _ terminalNode = (*floatLiteralNode)(nil)
+var _ terminalNode = (*identNode)(nil)
+
+// fileDecl abstracts over a parsed file node and the no-source placeholder.
+type fileDecl interface {
+	node
+	getSyntax() node
+}
+
+var _ fileDecl = (*fileNode)(nil)
+var _ fileDecl = (*noSourceNode)(nil)
+
+// optionDecl abstracts over option declarations.
+type optionDecl interface {
+	node
+	getName() node
+	getValue() valueNode
+}
+
+var _ optionDecl = (*optionNode)(nil)
+var _ optionDecl = (*noSourceNode)(nil)
+
+// fieldDecl abstracts over the various field-like declarations (normal
+// fields, groups, map fields, and the synthetic key/value fields of maps).
+type fieldDecl interface {
+	node
+	fieldLabel() node
+	fieldName() node
+	fieldType() node
+	fieldTag() node
+	fieldExtendee() node
+	getGroupKeyword() node
+}
+
+var _ fieldDecl = (*fieldNode)(nil)
+var _ fieldDecl = (*groupNode)(nil)
+var _ fieldDecl = (*mapFieldNode)(nil)
+var _ fieldDecl = (*syntheticMapField)(nil)
+var _ fieldDecl = (*noSourceNode)(nil)
+
+// rangeDecl abstracts over extension/reserved range declarations.
+type rangeDecl interface {
+	node
+	rangeStart() node
+	rangeEnd() node
+}
+
+var _ rangeDecl = (*rangeNode)(nil)
+var _ rangeDecl = (*noSourceNode)(nil)
+
+// enumValueDecl abstracts over enum value declarations.
+type enumValueDecl interface {
+	node
+	getName() node
+	getNumber() node
+}
+
+var _ enumValueDecl = (*enumValueNode)(nil)
+var _ enumValueDecl = (*noSourceNode)(nil)
+
+// msgDecl abstracts over message-like declarations (messages, groups, and
+// the synthetic entry message of a map field).
+type msgDecl interface {
+	node
+	messageName() node
+	reservedNames() []*stringLiteralNode
+}
+
+var _ msgDecl = (*messageNode)(nil)
+var _ msgDecl = (*groupNode)(nil)
+var _ msgDecl = (*mapFieldNode)(nil)
+var _ msgDecl = (*noSourceNode)(nil)
+
+// methodDecl abstracts over RPC method declarations.
+type methodDecl interface {
+	node
+	getInputType() node
+	getOutputType() node
+}
+
+var _ methodDecl = (*methodNode)(nil)
+var _ methodDecl = (*noSourceNode)(nil)
+
+// posRange is a simple start/end span in the source file.
+type posRange struct {
+	start, end *SourcePos
+}
+
+// basicNode is the base implementation for terminal (leaf) nodes; it stores
+// its own span and attached comments directly.
+type basicNode struct {
+	posRange
+	leading  []*comment
+	trailing []*comment
+}
+
+func (n *basicNode) start() *SourcePos {
+	return n.posRange.start
+}
+
+func (n *basicNode) end() *SourcePos {
+	return n.posRange.end
+}
+
+func (n *basicNode) leadingComments() []*comment {
+	return n.leading
+}
+
+func (n *basicNode) trailingComments() []*comment {
+	return n.trailing
+}
+
+// popLeadingComment removes and returns the first leading comment.
+// NOTE(review): panics if there are no leading comments — callers presumably
+// check first.
+func (n *basicNode) popLeadingComment() *comment {
+	c := n.leading[0]
+	n.leading = n.leading[1:]
+	return c
+}
+
+func (n *basicNode) pushTrailingComment(c *comment) {
+	n.trailing = append(n.trailing, c)
+}
+
+// comment is a single source comment with its span and raw text.
+type comment struct {
+	posRange
+	text string
+}
+
+// basicCompositeNode is the base implementation for non-leaf nodes; its span
+// and comments are derived from its first and last child nodes.
+type basicCompositeNode struct {
+	first node
+	last  node
+}
+
+func (n *basicCompositeNode) start() *SourcePos {
+	return n.first.start()
+}
+
+func (n *basicCompositeNode) end() *SourcePos {
+	return n.last.end()
+}
+
+func (n *basicCompositeNode) leadingComments() []*comment {
+	return n.first.leadingComments()
+}
+
+func (n *basicCompositeNode) trailingComments() []*comment {
+	return n.last.trailingComments()
+}
+
+func (n *basicCompositeNode) setRange(first, last node) {
+	n.first = first
+	n.last = last
+}
+
+// fileNode is the root of the AST for a single .proto file.
+type fileNode struct {
+	basicCompositeNode
+	syntax *syntaxNode
+	decls  []*fileElement
+
+	// These fields are populated after parsing, to make it easier to find them
+	// without searching decls. The parse result has a map of descriptors to
+	// nodes which makes the other declarations easily discoverable. But these
+	// elements do not map to descriptors -- they are just stored as strings in
+	// the file descriptor.
+	imports []*importNode
+	pkg     *packageNode
+}
+
+func (n *fileNode) getSyntax() node {
+	return n.syntax
+}
+
+// fileElement is a top-level declaration in a file.
+type fileElement struct {
+	// a discriminated union: only one field will be set
+	imp     *importNode
+	pkg     *packageNode
+	option  *optionNode
+	message *messageNode
+	enum    *enumNode
+	extend  *extendNode
+	service *serviceNode
+	empty   *basicNode
+}
+
+func (n *fileElement) start() *SourcePos {
+	return n.get().start()
+}
+
+func (n *fileElement) end() *SourcePos {
+	return n.get().end()
+}
+
+func (n *fileElement) leadingComments() []*comment {
+	return n.get().leadingComments()
+}
+
+func (n *fileElement) trailingComments() []*comment {
+	return n.get().trailingComments()
+}
+
+// get returns whichever member of the union is set (empty as the fallback).
+func (n *fileElement) get() node {
+	switch {
+	case n.imp != nil:
+		return n.imp
+	case n.pkg != nil:
+		return n.pkg
+	case n.option != nil:
+		return n.option
+	case n.message != nil:
+		return n.message
+	case n.enum != nil:
+		return n.enum
+	case n.extend != nil:
+		return n.extend
+	case n.service != nil:
+		return n.service
+	default:
+		return n.empty
+	}
+}
+
+// syntaxNode is a `syntax = "...";` declaration.
+type syntaxNode struct {
+	basicCompositeNode
+	syntax *stringLiteralNode
+}
+
+// importNode is an `import [public|weak] "...";` declaration.
+type importNode struct {
+	basicCompositeNode
+	name   *stringLiteralNode
+	public bool
+	weak   bool
+}
+
+// packageNode is a `package ...;` declaration.
+type packageNode struct {
+	basicCompositeNode
+	name *identNode
+}
+
+// identifier is the value type produced by identNode.value().
+type identifier string
+
+// identKind distinguishes the syntactic flavors of identifiers.
+type identKind int
+
+const (
+	identSimpleName identKind = iota
+	identQualified
+	identTypeName
+)
+
+type identNode struct {
+	basicNode
+	val  string
+	kind identKind
+}
+
+func (n *identNode) value() interface{} {
+	return identifier(n.val)
+}
+
+// optionNode is an `option name = value;` declaration.
+type optionNode struct {
+	basicCompositeNode
+	name *optionNameNode
+	val  valueNode
+}
+
+func (n *optionNode) getName() node {
+	return n.name
+}
+
+func (n *optionNode) getValue() valueNode {
+	return n.val
+}
+
+// optionNameNode is a dotted option name, possibly containing extension parts.
+type optionNameNode struct {
+	basicCompositeNode
+	parts []*optionNamePartNode
+}
+
+// optionNamePartNode is one component of an option name. For non-extension
+// parts, offset/length locate the component within the underlying token and
+// st/en are computed from them in setRange.
+type optionNamePartNode struct {
+	basicCompositeNode
+	text        *identNode
+	offset      int
+	length      int
+	isExtension bool
+	st, en      *SourcePos
+}
+
+func (n *optionNamePartNode) start() *SourcePos {
+	if n.isExtension {
+		return n.basicCompositeNode.start()
+	}
+	return n.st
+}
+
+func (n *optionNamePartNode) end() *SourcePos {
+	if n.isExtension {
+		return n.basicCompositeNode.end()
+	}
+	return n.en
+}
+
+// setRange records the enclosing token span and, for non-extension parts,
+// derives the component's own start/end by shifting columns by offset/length.
+func (n *optionNamePartNode) setRange(first, last node) {
+	n.basicCompositeNode.setRange(first, last)
+	if !n.isExtension {
+		st := *first.start()
+		st.Col += n.offset
+		n.st = &st
+		en := st
+		en.Col += n.length
+		n.en = &en
+	}
+}
+
+// valueNode is implemented by all constant/literal nodes.
+type valueNode interface {
+	node
+	value() interface{}
+}
+
+var _ valueNode = (*stringLiteralNode)(nil)
+var _ valueNode = (*intLiteralNode)(nil)
+var _ valueNode = (*negativeIntLiteralNode)(nil)
+var _ valueNode = (*floatLiteralNode)(nil)
+var _ valueNode = (*boolLiteralNode)(nil)
+var _ valueNode = (*sliceLiteralNode)(nil)
+var _ valueNode = (*aggregateLiteralNode)(nil)
+var _ valueNode = (*noSourceNode)(nil)
+
+// stringLiteralNode is a string constant, possibly the concatenation of
+// several adjacent string tokens (hence composite).
+type stringLiteralNode struct {
+	basicCompositeNode
+	val string
+}
+
+func (n *stringLiteralNode) value() interface{} {
+	return n.val
+}
+
+// comment re-attachment is delegated to the first/last underlying tokens
+func (n *stringLiteralNode) popLeadingComment() *comment {
+	return n.first.(terminalNode).popLeadingComment()
+}
+
+func (n *stringLiteralNode) pushTrailingComment(c *comment) {
+	n.last.(terminalNode).pushTrailingComment(c)
+}
+
+// intLiteralNode is an unsigned integer constant.
+type intLiteralNode struct {
+	basicNode
+	val uint64
+}
+
+func (n *intLiteralNode) value() interface{} {
+	return n.val
+}
+
+// negativeIntLiteralNode is a minus sign followed by an integer constant.
+type negativeIntLiteralNode struct {
+	basicCompositeNode
+	val int64
+}
+
+func (n *negativeIntLiteralNode) value() interface{} {
+	return n.val
+}
+
+// floatLiteralNode is a floating-point constant (possibly signed, hence
+// composite).
+type floatLiteralNode struct {
+	basicCompositeNode
+	val float64
+}
+
+func (n *floatLiteralNode) value() interface{} {
+	return n.val
+}
+
+func (n *floatLiteralNode) popLeadingComment() *comment {
+	return n.first.(terminalNode).popLeadingComment()
+}
+
+func (n *floatLiteralNode) pushTrailingComment(c *comment) {
+	n.last.(terminalNode).pushTrailingComment(c)
+}
+
+// boolLiteralNode is a `true` or `false` constant.
+type boolLiteralNode struct {
+	basicNode
+	val bool
+}
+
+func (n *boolLiteralNode) value() interface{} {
+	return n.val
+}
+
+// sliceLiteralNode is a bracketed list of values.
+type sliceLiteralNode struct {
+	basicCompositeNode
+	elements []valueNode
+}
+
+func (n *sliceLiteralNode) value() interface{} {
+	return n.elements
+}
+
+// aggregateLiteralNode is a message literal in option text format.
+type aggregateLiteralNode struct {
+	basicCompositeNode
+	elements []*aggregateEntryNode
+}
+
+func (n *aggregateLiteralNode) value() interface{} {
+	return n.elements
+}
+
+// aggregateEntryNode is a single name/value pair inside an aggregate literal.
+type aggregateEntryNode struct {
+	basicCompositeNode
+	name *aggregateNameNode
+	val  valueNode
+}
+
+// aggregateNameNode is the field name of an aggregate entry; extension names
+// are rendered in brackets.
+type aggregateNameNode struct {
+	basicCompositeNode
+	name        *identNode
+	isExtension bool
+}
+
+func (a *aggregateNameNode) value() string {
+	if a.isExtension {
+		return "[" + a.name.val + "]"
+	} else {
+		return a.name.val
+	}
+}
+
+// fieldNode is a normal (non-group, non-map) field declaration.
+type fieldNode struct {
+	basicCompositeNode
+	label   *labelNode
+	fldType *identNode
+	name    *identNode
+	tag     *intLiteralNode
+	options []*optionNode
+
+	// This field is populated after parsing, to allow lookup of extendee source
+	// locations when field extendees cannot be linked. (Otherwise, this is just
+	// stored as a string in the field descriptors defined inside the extend
+	// block).
+	extendee *extendNode
+}
+
+func (n *fieldNode) fieldLabel() node {
+	// proto3 fields and fields inside one-ofs will not have a label and we need
+	// this check in order to return a nil node -- otherwise we'd return a
+	// non-nil node that has a nil pointer value in it :/
+	if n.label == nil {
+		return nil
+	}
+	return n.label
+}
+
+func (n *fieldNode) fieldName() node {
+	return n.name
+}
+
+func (n *fieldNode) fieldType() node {
+	return n.fldType
+}
+
+func (n *fieldNode) fieldTag() node {
+	return n.tag
+}
+
+func (n *fieldNode) fieldExtendee() node {
+	if n.extendee != nil {
+		return n.extendee.extendee
+	}
+	return nil
+}
+
+// getGroupKeyword returns nil: normal fields have no group keyword.
+func (n *fieldNode) getGroupKeyword() node {
+	return nil
+}
+
+// labelNode is a `required`, `optional`, or `repeated` keyword.
+type labelNode struct {
+	basicNode
+	repeated bool
+	required bool
+}
+
+// groupNode is a group declaration, which acts as both a field and a nested
+// message definition.
+type groupNode struct {
+	basicCompositeNode
+	groupKeyword *identNode
+	label        *labelNode
+	name         *identNode
+	tag          *intLiteralNode
+	decls        []*messageElement
+
+	// This field is populated after parsing, to make it easier to find them
+	// without searching decls. The parse result has a map of descriptors to
+	// nodes which makes the other declarations easily discoverable. But these
+	// elements do not map to descriptors -- they are just stored as strings in
+	// the message descriptor.
+	reserved []*stringLiteralNode
+	// This field is populated after parsing, to allow lookup of extendee source
+	// locations when field extendees cannot be linked. (Otherwise, this is just
+	// stored as a string in the field descriptors defined inside the extend
+	// block).
+	extendee *extendNode
+}
+
+func (n *groupNode) fieldLabel() node {
+	return n.label
+}
+
+func (n *groupNode) fieldName() node {
+	return n.name
+}
+
+// fieldType returns the group's name: a group's type is the implicit message
+// named after it.
+func (n *groupNode) fieldType() node {
+	return n.name
+}
+
+func (n *groupNode) fieldTag() node {
+	return n.tag
+}
+
+func (n *groupNode) fieldExtendee() node {
+	if n.extendee != nil {
+		return n.extendee.extendee
+	}
+	return nil
+}
+
+func (n *groupNode) getGroupKeyword() node {
+	return n.groupKeyword
+}
+
+func (n *groupNode) messageName() node {
+	return n.name
+}
+
+func (n *groupNode) reservedNames() []*stringLiteralNode {
+	return n.reserved
+}
+
+// oneOfNode is a `oneof` declaration.
+type oneOfNode struct {
+	basicCompositeNode
+	name  *identNode
+	decls []*oneOfElement
+}
+
+// oneOfElement is a single declaration inside a oneof.
+type oneOfElement struct {
+	// a discriminated union: only one field will be set
+	option *optionNode
+	field  *fieldNode
+	empty  *basicNode
+}
+
+func (n *oneOfElement) start() *SourcePos {
+	return n.get().start()
+}
+
+func (n *oneOfElement) end() *SourcePos {
+	return n.get().end()
+}
+
+func (n *oneOfElement) leadingComments() []*comment {
+	return n.get().leadingComments()
+}
+
+func (n *oneOfElement) trailingComments() []*comment {
+	return n.get().trailingComments()
+}
+
+// get returns whichever member of the union is set (empty as the fallback).
+func (n *oneOfElement) get() node {
+	switch {
+	case n.option != nil:
+		return n.option
+	case n.field != nil:
+		return n.field
+	default:
+		return n.empty
+	}
+}
+
+// mapFieldNode is a `map<key, value>` field declaration; it also acts as the
+// msgDecl for the implicit map-entry message.
+type mapFieldNode struct {
+	basicCompositeNode
+	mapKeyword *identNode
+	keyType    *identNode
+	valueType  *identNode
+	name       *identNode
+	tag        *intLiteralNode
+	options    []*optionNode
+}
+
+// fieldLabel returns the `map` keyword; map fields have no explicit label.
+func (n *mapFieldNode) fieldLabel() node {
+	return n.mapKeyword
+}
+
+func (n *mapFieldNode) fieldName() node {
+	return n.name
+}
+
+func (n *mapFieldNode) fieldType() node {
+	return n.mapKeyword
+}
+
+func (n *mapFieldNode) fieldTag() node {
+	return n.tag
+}
+
+func (n *mapFieldNode) fieldExtendee() node {
+	return nil
+}
+
+func (n *mapFieldNode) getGroupKeyword() node {
+	return nil
+}
+
+func (n *mapFieldNode) messageName() node {
+	return n.name
+}
+
+func (n *mapFieldNode) reservedNames() []*stringLiteralNode {
+	return nil
+}
+
+// keyField returns the synthetic field (tag 1) for the map entry's key.
+func (n *mapFieldNode) keyField() *syntheticMapField {
+	tag := &intLiteralNode{
+		basicNode: basicNode{
+			posRange: posRange{start: n.keyType.start(), end: n.keyType.end()},
+		},
+		val: 1,
+	}
+	return &syntheticMapField{ident: n.keyType, tag: tag}
+}
+
+// valueField returns the synthetic field (tag 2) for the map entry's value.
+func (n *mapFieldNode) valueField() *syntheticMapField {
+	tag := &intLiteralNode{
+		basicNode: basicNode{
+			posRange: posRange{start: n.valueType.start(), end: n.valueType.end()},
+		},
+		val: 2,
+	}
+	return &syntheticMapField{ident: n.valueType, tag: tag}
+}
+
+// syntheticMapField is a fieldDecl for the implicit key/value fields of a
+// map entry message; all of its positions come from the type identifier.
+type syntheticMapField struct {
+	ident *identNode
+	tag   *intLiteralNode
+}
+
+func (n *syntheticMapField) start() *SourcePos {
+	return n.ident.start()
+}
+
+func (n *syntheticMapField) end() *SourcePos {
+	return n.ident.end()
+}
+
+func (n *syntheticMapField) leadingComments() []*comment {
+	return nil
+}
+
+func (n *syntheticMapField) trailingComments() []*comment {
+	return nil
+}
+
+func (n *syntheticMapField) fieldLabel() node {
+	return n.ident
+}
+
+func (n *syntheticMapField) fieldName() node {
+	return n.ident
+}
+
+func (n *syntheticMapField) fieldType() node {
+	return n.ident
+}
+
+func (n *syntheticMapField) fieldTag() node {
+	return n.tag
+}
+
+func (n *syntheticMapField) fieldExtendee() node {
+	return nil
+}
+
+func (n *syntheticMapField) getGroupKeyword() node {
+	return nil
+}
+
+// extensionRangeNode is an `extensions ...;` declaration.
+type extensionRangeNode struct {
+	basicCompositeNode
+	ranges  []*rangeNode
+	options []*optionNode
+}
+
+// rangeNode is a single start[/end] range; st/en hold the resolved numeric
+// bounds and stNode/enNode the syntax nodes they came from.
+type rangeNode struct {
+	basicCompositeNode
+	stNode, enNode node
+	st, en         int32
+}
+
+func (n *rangeNode) rangeStart() node {
+	return n.stNode
+}
+
+func (n *rangeNode) rangeEnd() node {
+	return n.enNode
+}
+
+// reservedNode is a `reserved ...;` declaration of ranges and/or names.
+type reservedNode struct {
+	basicCompositeNode
+	ranges []*rangeNode
+	names  []*stringLiteralNode
+}
+
+// enumNode is an `enum` declaration.
+type enumNode struct {
+	basicCompositeNode
+	name  *identNode
+	decls []*enumElement
+
+	// This field is populated after parsing, to make it easier to find them
+	// without searching decls. The parse result has a map of descriptors to
+	// nodes which makes the other declarations easily discoverable. But these
+	// elements do not map to descriptors -- they are just stored as strings in
+	// the message descriptor.
+	reserved []*stringLiteralNode
+}
+
+// enumElement is a single declaration inside an enum body.
+type enumElement struct {
+	// a discriminated union: only one field will be set
+	option   *optionNode
+	value    *enumValueNode
+	reserved *reservedNode
+	empty    *basicNode
+}
+
+func (n *enumElement) start() *SourcePos {
+	return n.get().start()
+}
+
+func (n *enumElement) end() *SourcePos {
+	return n.get().end()
+}
+
+func (n *enumElement) leadingComments() []*comment {
+	return n.get().leadingComments()
+}
+
+func (n *enumElement) trailingComments() []*comment {
+	return n.get().trailingComments()
+}
+
+// get returns whichever member of the union is set (empty as the fallback).
+// NOTE(review): the reserved member is not checked here, so a reserved
+// element would fall through to empty — verify against how the parser
+// populates these elements.
+func (n *enumElement) get() node {
+	switch {
+	case n.option != nil:
+		return n.option
+	case n.value != nil:
+		return n.value
+	default:
+		return n.empty
+	}
+}
+
+// enumValueNode is a single `NAME = number;` entry in an enum.
+type enumValueNode struct {
+	basicCompositeNode
+	name    *identNode
+	options []*optionNode
+
+	// only one of these two will be set:
+
+	numberP *intLiteralNode         // positive numeric value
+	numberN *negativeIntLiteralNode // negative numeric value
+}
+
+func (n *enumValueNode) getName() node {
+	return n.name
+}
+
+func (n *enumValueNode) getNumber() node {
+	if n.numberP != nil {
+		return n.numberP
+	}
+	return n.numberN
+}
+
+// messageNode is a `message` declaration.
+type messageNode struct {
+	basicCompositeNode
+	name  *identNode
+	decls []*messageElement
+
+	// This field is populated after parsing, to make it easier to find them
+	// without searching decls. The parse result has a map of descriptors to
+	// nodes which makes the other declarations easily discoverable. But these
+	// elements do not map to descriptors -- they are just stored as strings in
+	// the message descriptor.
+	reserved []*stringLiteralNode
+}
+
+func (n *messageNode) messageName() node {
+	return n.name
+}
+
+func (n *messageNode) reservedNames() []*stringLiteralNode {
+	return n.reserved
+}
+
+// messageElement is a single declaration inside a message body.
+type messageElement struct {
+	// a discriminated union: only one field will be set
+	option         *optionNode
+	field          *fieldNode
+	mapField       *mapFieldNode
+	oneOf          *oneOfNode
+	group          *groupNode
+	nested         *messageNode
+	enum           *enumNode
+	extend         *extendNode
+	extensionRange *extensionRangeNode
+	reserved       *reservedNode
+	empty          *basicNode
+}
+
+func (n *messageElement) start() *SourcePos {
+	return n.get().start()
+}
+
+func (n *messageElement) end() *SourcePos {
+	return n.get().end()
+}
+
+func (n *messageElement) leadingComments() []*comment {
+	return n.get().leadingComments()
+}
+
+func (n *messageElement) trailingComments() []*comment {
+	return n.get().trailingComments()
+}
+
+// get returns whichever member of the union is set (empty as the fallback).
+func (n *messageElement) get() node {
+	switch {
+	case n.option != nil:
+		return n.option
+	case n.field != nil:
+		return n.field
+	case n.mapField != nil:
+		return n.mapField
+	case n.oneOf != nil:
+		return n.oneOf
+	case n.group != nil:
+		return n.group
+	case n.nested != nil:
+		return n.nested
+	case n.enum != nil:
+		return n.enum
+	case n.extend != nil:
+		return n.extend
+	case n.extensionRange != nil:
+		return n.extensionRange
+	case n.reserved != nil:
+		return n.reserved
+	default:
+		return n.empty
+	}
+}
+
+// extendNode is an `extend` block.
+type extendNode struct {
+	basicCompositeNode
+	extendee *identNode
+	decls    []*extendElement
+}
+
+// extendElement is a single declaration inside an extend block.
+type extendElement struct {
+	// a discriminated union: only one field will be set
+	field *fieldNode
+	group *groupNode
+	empty *basicNode
+}
+
+func (n *extendElement) start() *SourcePos {
+	return n.get().start()
+}
+
+func (n *extendElement) end() *SourcePos {
+	return n.get().end()
+}
+
+func (n *extendElement) leadingComments() []*comment {
+	return n.get().leadingComments()
+}
+
+func (n *extendElement) trailingComments() []*comment {
+	return n.get().trailingComments()
+}
+
+// get returns whichever member of the union is set (empty as the fallback).
+func (n *extendElement) get() node {
+	switch {
+	case n.field != nil:
+		return n.field
+	case n.group != nil:
+		return n.group
+	default:
+		return n.empty
+	}
+}
+
+// serviceNode is a `service` declaration.
+type serviceNode struct {
+	basicCompositeNode
+	name  *identNode
+	decls []*serviceElement
+}
+
+// serviceElement is a single declaration inside a service body.
+type serviceElement struct {
+	// a discriminated union: only one field will be set
+	option *optionNode
+	rpc    *methodNode
+	empty  *basicNode
+}
+
+func (n *serviceElement) start() *SourcePos {
+	return n.get().start()
+}
+
+func (n *serviceElement) end() *SourcePos {
+	return n.get().end()
+}
+
+func (n *serviceElement) leadingComments() []*comment {
+	return n.get().leadingComments()
+}
+
+func (n *serviceElement) trailingComments() []*comment {
+	return n.get().trailingComments()
+}
+
+// get returns whichever member of the union is set (empty as the fallback).
+func (n *serviceElement) get() node {
+	switch {
+	case n.option != nil:
+		return n.option
+	case n.rpc != nil:
+		return n.rpc
+	default:
+		return n.empty
+	}
+}
+
+// methodNode is an `rpc` declaration within a service.
+type methodNode struct {
+	basicCompositeNode
+	name    *identNode
+	input   *rpcTypeNode
+	output  *rpcTypeNode
+	options []*optionNode
+}
+
+func (n *methodNode) getInputType() node {
+	return n.input.msgType
+}
+
+func (n *methodNode) getOutputType() node {
+	return n.output.msgType
+}
+
+// rpcTypeNode is a request or response type reference, with an optional
+// `stream` keyword.
+type rpcTypeNode struct {
+	basicCompositeNode
+	msgType       *identNode
+	streamKeyword node
+}
+
+// noSourceNode is a placeholder used when source location info is not
+// available (e.g. descriptors not built from source). It implements every
+// decl interface in this file, returning itself (or nil slices) everywhere.
+type noSourceNode struct {
+	pos *SourcePos
+}
+
+func (n noSourceNode) start() *SourcePos {
+	return n.pos
+}
+
+func (n noSourceNode) end() *SourcePos {
+	return n.pos
+}
+
+func (n noSourceNode) leadingComments() []*comment {
+	return nil
+}
+
+func (n noSourceNode) trailingComments() []*comment {
+	return nil
+}
+
+func (n noSourceNode) getSyntax() node {
+	return n
+}
+
+func (n noSourceNode) getName() node {
+	return n
+}
+
+func (n noSourceNode) getValue() valueNode {
+	return n
+}
+
+func (n noSourceNode) fieldLabel() node {
+	return n
+}
+
+func (n noSourceNode) fieldName() node {
+	return n
+}
+
+func (n noSourceNode) fieldType() node {
+	return n
+}
+
+func (n noSourceNode) fieldTag() node {
+	return n
+}
+
+func (n noSourceNode) fieldExtendee() node {
+	return n
+}
+
+func (n noSourceNode) getGroupKeyword() node {
+	return n
+}
+
+func (n noSourceNode) rangeStart() node {
+	return n
+}
+
+func (n noSourceNode) rangeEnd() node {
+	return n
+}
+
+func (n noSourceNode) getNumber() node {
+	return n
+}
+
+func (n noSourceNode) messageName() node {
+	return n
+}
+
+func (n noSourceNode) reservedNames() []*stringLiteralNode {
+	return nil
+}
+
+func (n noSourceNode) getInputType() node {
+	return n
+}
+
+func (n noSourceNode) getOutputType() node {
+	return n
+}
+
+func (n noSourceNode) value() interface{} {
+	return nil
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/doc.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/doc.go
new file mode 100644
index 0000000..c6446d3
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/doc.go
@@ -0,0 +1,10 @@
+// Package protoparse provides functionality for parsing *.proto source files
+// into descriptors that can be used with other protoreflect packages, like
+// dynamic messages and dynamic GRPC clients.
+//
+// This package links in other packages that include compiled descriptors for
+// the various "google/protobuf/*.proto" files that are included with protoc.
+// That way, like when invoking protoc, programs need not supply copies of these
+// "builtin" files. Though if copies of the files are provided, they will be
+// used instead of the builtin descriptors.
+package protoparse
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/lexer.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/lexer.go
new file mode 100644
index 0000000..c685e56
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/lexer.go
@@ -0,0 +1,766 @@
+package protoparse
+
+import (
+	"bufio"
+	"bytes"
+	"errors"
+	"fmt"
+	"io"
+	"strconv"
+	"strings"
+	"unicode/utf8"
+)
+
+// runeReader reads runes from an underlying bufio.Reader while allowing
+// arbitrarily many runes to be pushed back (LIFO) via unreadRune. Once a
+// read error occurs it is sticky: it is recorded in err and returned by
+// every subsequent read.
+type runeReader struct {
+	rr     *bufio.Reader
+	unread []rune
+	err    error
+}
+
+// readRune returns the next rune, preferring previously unread runes
+// (most recently pushed first) before consuming from the underlying
+// reader. The size of a pushed-back rune is recomputed with
+// utf8.RuneLen. A previously recorded error is returned immediately.
+func (rr *runeReader) readRune() (r rune, size int, err error) {
+	if rr.err != nil {
+		return 0, 0, rr.err
+	}
+	if len(rr.unread) > 0 {
+		// pop the most recently pushed-back rune
+		r := rr.unread[len(rr.unread)-1]
+		rr.unread = rr.unread[:len(rr.unread)-1]
+		return r, utf8.RuneLen(r), nil
+	}
+	r, sz, err := rr.rr.ReadRune()
+	if err != nil {
+		// remember the error so all future reads fail fast
+		rr.err = err
+	}
+	return r, sz, err
+}
+
+// unreadRune pushes r back onto the reader; pushed-back runes are
+// returned by readRune in LIFO order.
+func (rr *runeReader) unreadRune(r rune) {
+	rr.unread = append(rr.unread, r)
+}
+
+// lexError records a lexing error at the given position on the underlying
+// *protoLex. Only the first error is kept; later errors are discarded so
+// the earliest failure is the one reported.
+func lexError(l protoLexer, pos *SourcePos, err string) {
+	pl := l.(*protoLex)
+	if pl.err == nil {
+		pl.err = ErrorWithSourcePos{Underlying: errors.New(err), Pos: pos}
+	}
+}
+
+// protoLex is the lexer driven by the generated proto grammar. It tracks
+// the current read position (zero-based internally; SourcePos values are
+// one-based) and the most recently emitted symbol, which is used both for
+// comment attribution and for error positions.
+type protoLex struct {
+	filename string
+	input    *runeReader
+	err      error     // first error encountered; sticky
+	res      *fileNode // resulting AST (set by grammar actions — not shown here)
+
+	// zero-based position of the next rune to be read
+	lineNo int
+	colNo  int
+	offset int
+
+	prevSym terminalNode // most recently lexed symbol, if any
+}
+
+// newLexer returns a lexer that tokenizes proto source read from in.
+func newLexer(in io.Reader) *protoLex {
+	rdr := &runeReader{rr: bufio.NewReader(in)}
+	return &protoLex{input: rdr}
+}
+
+// keywords maps each proto keyword to the token constant produced by the
+// generated grammar. Identifiers not present here are lexed as _NAME.
+var keywords = map[string]int{
+	"syntax":     _SYNTAX,
+	"import":     _IMPORT,
+	"weak":       _WEAK,
+	"public":     _PUBLIC,
+	"package":    _PACKAGE,
+	"option":     _OPTION,
+	"true":       _TRUE,
+	"false":      _FALSE,
+	"inf":        _INF,
+	"nan":        _NAN,
+	"repeated":   _REPEATED,
+	"optional":   _OPTIONAL,
+	"required":   _REQUIRED,
+	"double":     _DOUBLE,
+	"float":      _FLOAT,
+	"int32":      _INT32,
+	"int64":      _INT64,
+	"uint32":     _UINT32,
+	"uint64":     _UINT64,
+	"sint32":     _SINT32,
+	"sint64":     _SINT64,
+	"fixed32":    _FIXED32,
+	"fixed64":    _FIXED64,
+	"sfixed32":   _SFIXED32,
+	"sfixed64":   _SFIXED64,
+	"bool":       _BOOL,
+	"string":     _STRING,
+	"bytes":      _BYTES,
+	"group":      _GROUP,
+	"oneof":      _ONEOF,
+	"map":        _MAP,
+	"extensions": _EXTENSIONS,
+	"to":         _TO,
+	"max":        _MAX,
+	"reserved":   _RESERVED,
+	"enum":       _ENUM,
+	"message":    _MESSAGE,
+	"extend":     _EXTEND,
+	"service":    _SERVICE,
+	"rpc":        _RPC,
+	"stream":     _STREAM,
+	"returns":    _RETURNS,
+}
+
+// cur returns the lexer's current position as a one-based SourcePos.
+func (l *protoLex) cur() *SourcePos {
+	p := SourcePos{
+		Filename: l.filename,
+		Offset:   l.offset,
+		Line:     l.lineNo + 1,
+		Col:      l.colNo + 1,
+	}
+	return &p
+}
+
+// prev returns the start position of the most recently lexed symbol, or
+// the very beginning of the file if nothing has been lexed yet.
+func (l *protoLex) prev() *SourcePos {
+	if l.prevSym != nil {
+		return l.prevSym.start()
+	}
+	return &SourcePos{
+		Filename: l.filename,
+		Offset:   0,
+		Line:     1,
+		Col:      1,
+	}
+}
+
+// Lex implements the protoLexer interface used by the generated parser.
+// It returns the next token's code (0 at EOF, _ERROR on failure) and
+// stores the token's value and source range into lval. While scanning it
+// accumulates comments and attributes them as leading comments of the
+// next symbol or trailing comments of the previous one.
+func (l *protoLex) Lex(lval *protoSymType) int {
+	if l.err != nil {
+		// if we are already in a failed state, bail
+		lval.err = l.err
+		return _ERROR
+	}
+
+	// position where the current token begins
+	prevLineNo := l.lineNo
+	prevColNo := l.colNo
+	prevOffset := l.offset
+	var comments []*comment
+
+	// pos computes the source range from the token start to the current position
+	pos := func() posRange {
+		return posRange{
+			start: &SourcePos{
+				Filename: l.filename,
+				Offset:   prevOffset,
+				Line:     prevLineNo + 1,
+				Col:      prevColNo + 1,
+			},
+			end: l.cur(),
+		}
+	}
+	basic := func() basicNode {
+		return basicNode{
+			posRange: pos(),
+			leading:  comments,
+		}
+	}
+	// setPrev records n as the most recent symbol and decides whether some
+	// of its leading comments should instead become trailing comments of
+	// the previous symbol
+	setPrev := func(n terminalNode) {
+		nStart := n.start().Line
+		if _, ok := n.(*basicNode); ok {
+			// if the node is a simple rune, don't attribute comments to it
+			// HACK: adjusting the start line makes leading comments appear
+			// detached so logic below will naturally associated trailing
+			// comment to previous symbol
+			nStart += 2
+		}
+		if l.prevSym != nil && len(n.leadingComments()) > 0 && l.prevSym.end().Line < nStart {
+			// we may need to re-attribute the first comment to
+			// instead be previous node's trailing comment
+			prevEnd := l.prevSym.end().Line
+			comments := n.leadingComments()
+			c := comments[0]
+			commentStart := c.start.Line
+			if commentStart == prevEnd {
+				// comment is on same line as previous symbol
+				n.popLeadingComment()
+				l.prevSym.pushTrailingComment(c)
+			} else if commentStart == prevEnd+1 {
+				// comment is right after previous symbol; see if it is detached
+				// and if so re-attribute
+				singleLineStyle := strings.HasPrefix(c.text, "//")
+				line := c.end.Line
+				groupEnd := -1
+				for i := 1; i < len(comments); i++ {
+					c := comments[i]
+					newGroup := false
+					if !singleLineStyle || c.start.Line > line+1 {
+						// we've found a gap between comments, which means the
+						// previous comments were detached
+						newGroup = true
+					} else {
+						line = c.end.Line
+						singleLineStyle = strings.HasPrefix(comments[i].text, "//")
+						if !singleLineStyle {
+							// we've found a switch from // comments to /*
+							// consider that a new group which means the
+							// previous comments were detached
+							newGroup = true
+						}
+					}
+					if newGroup {
+						groupEnd = i
+						break
+					}
+				}
+
+				if groupEnd == -1 {
+					// just one group of comments; we'll mark it as a trailing
+					// comment if it immediately follows previous symbol and is
+					// detached from current symbol
+					c1 := comments[0]
+					c2 := comments[len(comments)-1]
+					if c1.start.Line <= prevEnd+1 && c2.end.Line < nStart-1 {
+						groupEnd = len(comments)
+					}
+				}
+
+				for i := 0; i < groupEnd; i++ {
+					l.prevSym.pushTrailingComment(n.popLeadingComment())
+				}
+			}
+		}
+
+		l.prevSym = n
+	}
+	// helpers to populate lval for each token kind
+	setString := func(val string) {
+		b := basic()
+		lval.str = &stringLiteralNode{val: val}
+		lval.str.setRange(&b, &b)
+		setPrev(lval.str)
+	}
+	setIdent := func(val string, kind identKind) {
+		lval.id = &identNode{basicNode: basic(), val: val, kind: kind}
+		setPrev(lval.id)
+	}
+	setInt := func(val uint64) {
+		lval.ui = &intLiteralNode{basicNode: basic(), val: val}
+		setPrev(lval.ui)
+	}
+	setFloat := func(val float64) {
+		b := basic()
+		lval.f = &floatLiteralNode{val: val}
+		lval.f.setRange(&b, &b)
+		setPrev(lval.f)
+	}
+	setRune := func() {
+		b := basic()
+		lval.b = &b
+		setPrev(lval.b)
+	}
+	setError := func(err error) {
+		lval.err = err
+		l.err = err
+	}
+
+	// main scan loop: skip whitespace and comments, then dispatch on the
+	// first rune of the next token
+	for {
+		c, n, err := l.input.readRune()
+		if err == io.EOF {
+			// we're not actually returning a rune, but this will associate
+			// accumulated comments as a trailing comment on last symbol
+			// (if appropriate)
+			setRune()
+			return 0
+		} else if err != nil {
+			setError(err)
+			return _ERROR
+		}
+
+		prevLineNo = l.lineNo
+		prevColNo = l.colNo
+		prevOffset = l.offset
+
+		l.offset += n
+		if c == '\n' {
+			l.colNo = 0
+			l.lineNo++
+			continue
+		} else if c == '\r' {
+			continue
+		}
+		l.colNo++
+		if c == ' ' || c == '\t' {
+			continue
+		}
+
+		if c == '.' {
+			// tokens that start with a dot include type names and decimal literals
+			cn, _, err := l.input.readRune()
+			if err != nil {
+				setRune()
+				return int(c)
+			}
+			if cn == '_' || (cn >= 'a' && cn <= 'z') || (cn >= 'A' && cn <= 'Z') {
+				l.colNo++
+				token := []rune{c, cn}
+				token = l.readIdentifier(token)
+				setIdent(string(token), identTypeName)
+				return _TYPENAME
+			}
+			if cn >= '0' && cn <= '9' {
+				l.colNo++
+				token := []rune{c, cn}
+				token = l.readNumber(token, false, true)
+				f, err := strconv.ParseFloat(string(token), 64)
+				if err != nil {
+					setError(err)
+					return _ERROR
+				}
+				setFloat(f)
+				return _FLOAT_LIT
+			}
+			// lone dot: return it as a single-rune token
+			l.input.unreadRune(cn)
+			setRune()
+			return int(c)
+		}
+
+		if c == '_' || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') {
+			// identifier
+			token := []rune{c}
+			token = l.readIdentifier(token)
+			str := string(token)
+			if strings.Contains(str, ".") {
+				setIdent(str, identQualified)
+				return _FQNAME
+			}
+			if t, ok := keywords[str]; ok {
+				setIdent(str, identSimpleName)
+				return t
+			}
+			setIdent(str, identSimpleName)
+			return _NAME
+		}
+
+		if c >= '0' && c <= '9' {
+			// integer or float literal
+			if c == '0' {
+				cn, _, err := l.input.readRune()
+				if err != nil {
+					setInt(0)
+					return _INT_LIT
+				}
+				if cn == 'x' || cn == 'X' {
+					cnn, _, err := l.input.readRune()
+					if err != nil {
+						l.input.unreadRune(cn)
+						setInt(0)
+						return _INT_LIT
+					}
+					if (cnn >= '0' && cnn <= '9') || (cnn >= 'a' && cnn <= 'f') || (cnn >= 'A' && cnn <= 'F') {
+						// hexadecimal!
+						l.colNo += 2
+						token := []rune{cnn}
+						token = l.readHexNumber(token)
+						ui, err := strconv.ParseUint(string(token), 16, 64)
+						if err != nil {
+							setError(err)
+							return _ERROR
+						}
+						setInt(ui)
+						return _INT_LIT
+					}
+					// "0x" not followed by a hex digit: just the literal 0
+					l.input.unreadRune(cnn)
+					l.input.unreadRune(cn)
+					setInt(0)
+					return _INT_LIT
+				} else {
+					l.input.unreadRune(cn)
+				}
+			}
+			token := []rune{c}
+			token = l.readNumber(token, true, true)
+			numstr := string(token)
+			if strings.Contains(numstr, ".") || strings.Contains(numstr, "e") || strings.Contains(numstr, "E") {
+				// floating point!
+				f, err := strconv.ParseFloat(numstr, 64)
+				if err != nil {
+					setError(err)
+					return _ERROR
+				}
+				setFloat(f)
+				return _FLOAT_LIT
+			}
+			// integer! (decimal or octal)
+			ui, err := strconv.ParseUint(numstr, 0, 64)
+			if err != nil {
+				setError(err)
+				return _ERROR
+			}
+			setInt(ui)
+			return _INT_LIT
+		}
+
+		if c == '\'' || c == '"' {
+			// string literal
+			str, err := l.readStringLiteral(c)
+			if err != nil {
+				setError(err)
+				return _ERROR
+			}
+			setString(str)
+			return _STRING_LIT
+		}
+
+		if c == '/' {
+			// comment
+			cn, _, err := l.input.readRune()
+			if err != nil {
+				setRune()
+				return int(c)
+			}
+			if cn == '/' {
+				l.colNo++
+				hitNewline, txt := l.skipToEndOfLineComment()
+				commentPos := pos()
+				commentPos.end.Col++
+				if hitNewline {
+					l.colNo = 0
+					l.lineNo++
+				}
+				comments = append(comments, &comment{posRange: commentPos, text: txt})
+				continue
+			}
+			if cn == '*' {
+				l.colNo++
+				if txt, ok := l.skipToEndOfBlockComment(); !ok {
+					setError(errors.New("block comment never terminates, unexpected EOF"))
+					return _ERROR
+				} else {
+					comments = append(comments, &comment{posRange: pos(), text: txt})
+				}
+				continue
+			}
+			l.input.unreadRune(cn)
+		}
+
+		// any other rune is returned as its own single-rune token
+		setRune()
+		return int(c)
+	}
+}
+
+// readNumber consumes the remainder of a numeric literal into sofar.
+// allowDot and allowExp say whether a decimal point and an exponent
+// ('e'/'E' with optional sign) are still permitted; each may be consumed
+// at most once. Any rune that cannot extend the number is pushed back
+// (in reverse order, so it is re-read correctly) and the accumulated
+// token is returned.
+func (l *protoLex) readNumber(sofar []rune, allowDot bool, allowExp bool) []rune {
+	token := sofar
+	for {
+		c, _, err := l.input.readRune()
+		if err != nil {
+			break
+		}
+		if c == '.' {
+			if !allowDot {
+				l.input.unreadRune(c)
+				break
+			}
+			allowDot = false
+			// a dot must be followed by a digit to be part of the number
+			cn, _, err := l.input.readRune()
+			if err != nil {
+				l.input.unreadRune(c)
+				break
+			}
+			if cn < '0' || cn > '9' {
+				l.input.unreadRune(cn)
+				l.input.unreadRune(c)
+				break
+			}
+			l.colNo++
+			token = append(token, c)
+			c = cn
+		} else if c == 'e' || c == 'E' {
+			if !allowExp {
+				l.input.unreadRune(c)
+				break
+			}
+			allowExp = false
+			// the exponent marker must be followed by an (optionally
+			// signed) digit to be part of the number
+			cn, _, err := l.input.readRune()
+			if err != nil {
+				l.input.unreadRune(c)
+				break
+			}
+			if cn == '-' || cn == '+' {
+				cnn, _, err := l.input.readRune()
+				if err != nil {
+					l.input.unreadRune(cn)
+					l.input.unreadRune(c)
+					break
+				}
+				if cnn < '0' || cnn > '9' {
+					l.input.unreadRune(cnn)
+					l.input.unreadRune(cn)
+					l.input.unreadRune(c)
+					break
+				}
+				l.colNo++
+				token = append(token, c)
+				c = cn
+				cn = cnn
+			} else if cn < '0' || cn > '9' {
+				l.input.unreadRune(cn)
+				l.input.unreadRune(c)
+				break
+			}
+			l.colNo++
+			token = append(token, c)
+			c = cn
+		} else if c < '0' || c > '9' {
+			l.input.unreadRune(c)
+			break
+		}
+		l.colNo++
+		token = append(token, c)
+	}
+	return token
+}
+
+// readHexNumber consumes hex digits ([0-9a-fA-F]) and appends them to
+// sofar, stopping at (and unreading) the first non-hex rune or on any
+// read error. The column counter is advanced for each consumed digit.
+func (l *protoLex) readHexNumber(sofar []rune) []rune {
+	token := sofar
+	for {
+		c, _, err := l.input.readRune()
+		if err != nil {
+			return token
+		}
+		isDigit := c >= '0' && c <= '9'
+		isLower := c >= 'a' && c <= 'f'
+		isUpper := c >= 'A' && c <= 'F'
+		if !isDigit && !isLower && !isUpper {
+			l.input.unreadRune(c)
+			return token
+		}
+		l.colNo++
+		token = append(token, c)
+	}
+}
+
+// readIdentifier consumes the remainder of a (possibly qualified)
+// identifier into sofar. Identifier runes are letters, digits, and
+// underscores; a dot is accepted only when immediately followed by a
+// letter or underscore (starting a new name segment), otherwise the dot
+// is unread and scanning stops.
+func (l *protoLex) readIdentifier(sofar []rune) []rune {
+	token := sofar
+	for {
+		c, _, err := l.input.readRune()
+		if err != nil {
+			break
+		}
+		if c == '.' {
+			// peek at the rune after the dot: it must begin a new segment
+			cn, _, err := l.input.readRune()
+			if err != nil {
+				l.input.unreadRune(c)
+				break
+			}
+			if cn != '_' && (cn < 'a' || cn > 'z') && (cn < 'A' || cn > 'Z') {
+				l.input.unreadRune(cn)
+				l.input.unreadRune(c)
+				break
+			}
+			l.colNo++
+			token = append(token, c)
+			c = cn
+		} else if c != '_' && (c < 'a' || c > 'z') && (c < 'A' || c > 'Z') && (c < '0' || c > '9') {
+			l.input.unreadRune(c)
+			break
+		}
+		l.colNo++
+		token = append(token, c)
+	}
+	return token
+}
+
+// readStringLiteral consumes a string literal opened with the given quote
+// rune and returns its decoded contents (without the quotes). It handles
+// C-style escape sequences: hex (\x, one or two digits), octal (up to
+// three digits, max 0377), short (\uXXXX) and long (\UXXXXXXXX) unicode,
+// and the usual single-character escapes. Newlines, NUL characters, and
+// EOF before the closing quote are errors.
+func (l *protoLex) readStringLiteral(quote rune) (string, error) {
+	var buf bytes.Buffer
+	for {
+		c, _, err := l.input.readRune()
+		if err != nil {
+			if err == io.EOF {
+				err = io.ErrUnexpectedEOF
+			}
+			return "", err
+		}
+		if c == '\n' {
+			l.colNo = 0
+			l.lineNo++
+			return "", errors.New("encountered end-of-line before end of string literal")
+		}
+		l.colNo++
+		if c == quote {
+			break
+		}
+		if c == 0 {
+			return "", errors.New("null character ('\\0') not allowed in string literal")
+		}
+		if c == '\\' {
+			// escape sequence
+			c, _, err = l.input.readRune()
+			if err != nil {
+				return "", err
+			}
+			l.colNo++
+			if c == 'x' || c == 'X' {
+				// hex escape: one or two hex digits, emitted as a single byte
+				c, _, err := l.input.readRune()
+				if err != nil {
+					return "", err
+				}
+				l.colNo++
+				c2, _, err := l.input.readRune()
+				if err != nil {
+					return "", err
+				}
+				var hex string
+				if (c2 < '0' || c2 > '9') && (c2 < 'a' || c2 > 'f') && (c2 < 'A' || c2 > 'F') {
+					// only one hex digit; push the other rune back
+					l.input.unreadRune(c2)
+					hex = string(c)
+				} else {
+					l.colNo++
+					hex = string([]rune{c, c2})
+				}
+				i, err := strconv.ParseInt(hex, 16, 32)
+				if err != nil {
+					return "", fmt.Errorf("invalid hex escape: \\x%q", hex)
+				}
+				buf.WriteByte(byte(i))
+
+			} else if c >= '0' && c <= '7' {
+				// octal escape: up to three octal digits, emitted as a single byte
+				c2, _, err := l.input.readRune()
+				if err != nil {
+					return "", err
+				}
+				var octal string
+				if c2 < '0' || c2 > '7' {
+					l.input.unreadRune(c2)
+					octal = string(c)
+				} else {
+					l.colNo++
+					c3, _, err := l.input.readRune()
+					if err != nil {
+						return "", err
+					}
+					if c3 < '0' || c3 > '7' {
+						l.input.unreadRune(c3)
+						octal = string([]rune{c, c2})
+					} else {
+						l.colNo++
+						octal = string([]rune{c, c2, c3})
+					}
+				}
+				i, err := strconv.ParseInt(octal, 8, 32)
+				if err != nil {
+					return "", fmt.Errorf("invalid octal escape: \\%q", octal)
+				}
+				if i > 0xff {
+					return "", fmt.Errorf("octal escape is out range, must be between 0 and 377: \\%q", octal)
+				}
+				buf.WriteByte(byte(i))
+
+			} else if c == 'u' {
+				// short unicode escape: exactly four hex digits
+				u := make([]rune, 4)
+				for i := range u {
+					c, _, err := l.input.readRune()
+					if err != nil {
+						return "", err
+					}
+					l.colNo++
+					u[i] = c
+				}
+				i, err := strconv.ParseInt(string(u), 16, 32)
+				if err != nil {
+					return "", fmt.Errorf("invalid unicode escape: \\u%q", string(u))
+				}
+				buf.WriteRune(rune(i))
+
+			} else if c == 'U' {
+				// long unicode escape: exactly eight hex digits
+				u := make([]rune, 8)
+				for i := range u {
+					c, _, err := l.input.readRune()
+					if err != nil {
+						return "", err
+					}
+					l.colNo++
+					u[i] = c
+				}
+				i, err := strconv.ParseInt(string(u), 16, 32)
+				if err != nil {
+					return "", fmt.Errorf("invalid unicode escape: \\U%q", string(u))
+				}
+				if i > 0x10ffff || i < 0 {
+					return "", fmt.Errorf("unicode escape is out of range, must be between 0 and 0x10ffff: \\U%q", string(u))
+				}
+				buf.WriteRune(rune(i))
+
+			} else if c == 'a' {
+				buf.WriteByte('\a')
+			} else if c == 'b' {
+				buf.WriteByte('\b')
+			} else if c == 'f' {
+				buf.WriteByte('\f')
+			} else if c == 'n' {
+				buf.WriteByte('\n')
+			} else if c == 'r' {
+				buf.WriteByte('\r')
+			} else if c == 't' {
+				buf.WriteByte('\t')
+			} else if c == 'v' {
+				buf.WriteByte('\v')
+			} else if c == '\\' {
+				buf.WriteByte('\\')
+			} else if c == '\'' {
+				buf.WriteByte('\'')
+			} else if c == '"' {
+				buf.WriteByte('"')
+			} else if c == '?' {
+				buf.WriteByte('?')
+			} else {
+				return "", fmt.Errorf("invalid escape sequence: %q", "\\"+string(c))
+			}
+		} else {
+			buf.WriteRune(c)
+		}
+	}
+	return buf.String(), nil
+}
+
+// skipToEndOfLineComment consumes the remainder of a "//" comment. It
+// reports whether a newline terminated the comment (false on EOF/error)
+// along with the comment text including the leading "//". The newline
+// itself is neither included in the text nor counted in colNo.
+func (l *protoLex) skipToEndOfLineComment() (bool, string) {
+	txt := []rune{'/', '/'}
+	for {
+		c, _, err := l.input.readRune()
+		switch {
+		case err != nil:
+			return false, string(txt)
+		case c == '\n':
+			return true, string(txt)
+		}
+		l.colNo++
+		txt = append(txt, c)
+	}
+}
+
+// skipToEndOfBlockComment consumes a "/*" comment through its closing
+// "*/", maintaining line/column tracking across newlines. It returns the
+// full comment text and true, or "" and false if EOF (or a read error)
+// is reached before the comment terminates.
+func (l *protoLex) skipToEndOfBlockComment() (string, bool) {
+	txt := []rune{'/', '*'}
+	for {
+		c, _, err := l.input.readRune()
+		if err != nil {
+			return "", false
+		}
+		if c == '\n' {
+			l.colNo = 0
+			l.lineNo++
+		} else {
+			l.colNo++
+		}
+		txt = append(txt, c)
+		if c == '*' {
+			// check whether this '*' closes the comment
+			c, _, err := l.input.readRune()
+			if err != nil {
+				return "", false
+			}
+			if c == '/' {
+				l.colNo++
+				txt = append(txt, c)
+				return string(txt), true
+			}
+			l.input.unreadRune(c)
+		}
+	}
+}
+
+// Error implements the protoLexer interface; the generated parser calls
+// it to report a syntax error. Only the first error is recorded so the
+// earliest failure is the one surfaced to the caller.
+func (l *protoLex) Error(s string) {
+	if l.err == nil {
+		// Use prev() rather than prevSym.start() so that an error reported
+		// before any symbol has been lexed (prevSym == nil) does not panic;
+		// prev() falls back to the start of the file in that case.
+		l.err = ErrorWithSourcePos{Underlying: errors.New(s), Pos: l.prev()}
+	}
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/linker.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/linker.go
new file mode 100644
index 0000000..c150936
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/linker.go
@@ -0,0 +1,652 @@
+package protoparse
+
+import (
+	"bytes"
+	"fmt"
+	"sort"
+	"strings"
+
+	"github.com/golang/protobuf/proto"
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+
+	"github.com/jhump/protoreflect/desc"
+	"github.com/jhump/protoreflect/desc/internal"
+)
+
+// linker performs the link stage of compilation over a set of parsed
+// files: it pools all declared symbols, resolves and rewrites type
+// references, and tracks extension tag usage per extended message.
+type linker struct {
+	files          map[string]*parseResult
+	descriptorPool map[*dpb.FileDescriptorProto]map[string]proto.Message
+	extensions     map[string]map[int32]string
+}
+
+// newLinker creates a linker over the given parse results, keyed by file
+// name. The pools are populated lazily by the link steps.
+func newLinker(files map[string]*parseResult) *linker {
+	l := linker{files: files}
+	return &l
+}
+
+// linkFiles runs all link steps over the parsed files — symbol pooling,
+// reference resolution, construction of rich descriptors, and option
+// interpretation — and returns the linked descriptors keyed by file name.
+func (l *linker) linkFiles() (map[string]*desc.FileDescriptor, error) {
+	// First, we put all symbols into a single pool, which lets us ensure there
+	// are no duplicate symbols and will also let us resolve and revise all type
+	// references in next step.
+	if err := l.createDescriptorPool(); err != nil {
+		return nil, err
+	}
+
+	// After we've populated the pool, we can now try to resolve all type
+	// references. All references must be checked for correct type, any fields
+	// with enum types must be corrected (since we parse them as if they are
+	// message references since we don't actually know message or enum until
+	// link time), and references will be re-written to be fully-qualified
+	// references (e.g. start with a dot ".").
+	if err := l.resolveReferences(); err != nil {
+		return nil, err
+	}
+
+	// Now we've validated the descriptors, so we can link them into rich
+	// descriptors. This is a little redundant since that step does similar
+	// checking of symbols. But, without breaking encapsulation (e.g. exporting
+	// a lot of fields from desc package that are currently unexported) or
+	// merging this into the same package, we can't really prevent it.
+	linked, err := l.createdLinkedDescriptors()
+	if err != nil {
+		return nil, err
+	}
+
+	// Now that we have linked descriptors, we can interpret any uninterpreted
+	// options that remain.
+	for _, r := range l.files {
+		fd := linked[r.fd.GetName()]
+		if err := interpretFileOptions(r, richFileDescriptorish{FileDescriptor: fd}); err != nil {
+			return nil, err
+		}
+	}
+
+	return linked, nil
+}
+
+// createDescriptorPool builds a per-file map of fully-qualified symbol
+// name to descriptor proto for every message, field, extension, enum,
+// enum value, service, and method. It then verifies global uniqueness
+// across all files, reporting a duplicate-symbol error (attributed to
+// the lexically-later file, deterministically) on conflict.
+func (l *linker) createDescriptorPool() error {
+	l.descriptorPool = map[*dpb.FileDescriptorProto]map[string]proto.Message{}
+	for _, r := range l.files {
+		fd := r.fd
+		pool := map[string]proto.Message{}
+		l.descriptorPool[fd] = pool
+		prefix := fd.GetPackage()
+		if prefix != "" {
+			prefix += "."
+		}
+		for _, md := range fd.MessageType {
+			if err := addMessageToPool(r, pool, prefix, md); err != nil {
+				return err
+			}
+		}
+		for _, fld := range fd.Extension {
+			if err := addFieldToPool(r, pool, prefix, fld); err != nil {
+				return err
+			}
+		}
+		for _, ed := range fd.EnumType {
+			if err := addEnumToPool(r, pool, prefix, ed); err != nil {
+				return err
+			}
+		}
+		for _, sd := range fd.Service {
+			if err := addServiceToPool(r, pool, prefix, sd); err != nil {
+				return err
+			}
+		}
+	}
+	// try putting everything into a single pool, to ensure there are no duplicates
+	// across files (e.g. same symbol, but declared in two different files)
+	type entry struct {
+		file string
+		msg  proto.Message
+	}
+	pool := map[string]entry{}
+	for f, p := range l.descriptorPool {
+		for k, v := range p {
+			if e, ok := pool[k]; ok {
+				desc1 := e.msg
+				file1 := e.file
+				desc2 := v
+				file2 := f.GetName()
+				// order the pair by file name so the error is deterministic
+				if file2 < file1 {
+					file1, file2 = file2, file1
+					desc1, desc2 = desc2, desc1
+				}
+				node := l.files[file2].nodes[desc2]
+				return ErrorWithSourcePos{Pos: node.start(), Underlying: fmt.Errorf("duplicate symbol %s: already defined as %s in %q", k, descriptorType(desc1), file1)}
+			}
+			pool[k] = entry{file: f.GetName(), msg: v}
+		}
+	}
+
+	return nil
+}
+
+// addMessageToPool registers the message and, recursively, all of its
+// fields, extensions, nested messages, and nested enums under their
+// fully-qualified names.
+func addMessageToPool(r *parseResult, pool map[string]proto.Message, prefix string, md *dpb.DescriptorProto) error {
+	fqn := prefix + md.GetName()
+	if err := addToPool(r, pool, fqn, md); err != nil {
+		return err
+	}
+	prefix = fqn + "."
+	for _, fld := range md.Field {
+		if err := addFieldToPool(r, pool, prefix, fld); err != nil {
+			return err
+		}
+	}
+	for _, fld := range md.Extension {
+		if err := addFieldToPool(r, pool, prefix, fld); err != nil {
+			return err
+		}
+	}
+	for _, nmd := range md.NestedType {
+		if err := addMessageToPool(r, pool, prefix, nmd); err != nil {
+			return err
+		}
+	}
+	for _, ed := range md.EnumType {
+		if err := addEnumToPool(r, pool, prefix, ed); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// addFieldToPool registers a field (or extension) descriptor under its
+// fully-qualified name.
+func addFieldToPool(r *parseResult, pool map[string]proto.Message, prefix string, fld *dpb.FieldDescriptorProto) error {
+	fqn := prefix + fld.GetName()
+	return addToPool(r, pool, fqn, fld)
+}
+
+// addEnumToPool registers an enum and each of its values; values are
+// registered under the enum's fully-qualified name (fqn + "." + value).
+func addEnumToPool(r *parseResult, pool map[string]proto.Message, prefix string, ed *dpb.EnumDescriptorProto) error {
+	fqn := prefix + ed.GetName()
+	if err := addToPool(r, pool, fqn, ed); err != nil {
+		return err
+	}
+	for _, evd := range ed.Value {
+		vfqn := fqn + "." + evd.GetName()
+		if err := addToPool(r, pool, vfqn, evd); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// addServiceToPool registers a service and each of its methods under
+// their fully-qualified names.
+func addServiceToPool(r *parseResult, pool map[string]proto.Message, prefix string, sd *dpb.ServiceDescriptorProto) error {
+	fqn := prefix + sd.GetName()
+	if err := addToPool(r, pool, fqn, sd); err != nil {
+		return err
+	}
+	for _, mtd := range sd.Method {
+		mfqn := fqn + "." + mtd.GetName()
+		if err := addToPool(r, pool, mfqn, mtd); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// addToPool records dsc in the pool under fqn, returning an error with
+// the declaration's source position if the name is already taken.
+func addToPool(r *parseResult, pool map[string]proto.Message, fqn string, dsc proto.Message) error {
+	if d, ok := pool[fqn]; ok {
+		node := r.nodes[dsc]
+		return ErrorWithSourcePos{Pos: node.start(), Underlying: fmt.Errorf("duplicate symbol %s: already defined as %s", fqn, descriptorType(d))}
+	}
+	pool[fqn] = dsc
+	return nil
+}
+
+// descriptorType returns a human-readable name for the kind of descriptor
+// proto given; it is used when composing error messages (e.g. reporting
+// duplicate or wrongly-typed symbols).
+func descriptorType(m proto.Message) string {
+	switch m := m.(type) {
+	case *dpb.DescriptorProto:
+		return "message"
+	case *dpb.DescriptorProto_ExtensionRange:
+		return "extension range"
+	case *dpb.FieldDescriptorProto:
+		// a field descriptor with an extendee is an extension declaration
+		if m.GetExtendee() == "" {
+			return "field"
+		}
+		return "extension"
+	case *dpb.EnumDescriptorProto:
+		return "enum"
+	case *dpb.EnumValueDescriptorProto:
+		return "enum value"
+	case *dpb.ServiceDescriptorProto:
+		return "service"
+	case *dpb.MethodDescriptorProto:
+		return "method"
+	case *dpb.FileDescriptorProto:
+		return "file"
+	default:
+		// shouldn't be possible
+		return fmt.Sprintf("%T", m)
+	}
+}
+
+// resolveReferences walks every parsed file and resolves all option
+// names and type references (for messages, extensions, enums, and
+// services), rewriting them to fully-qualified form. It also initializes
+// the table used to detect duplicate extension tags.
+func (l *linker) resolveReferences() error {
+	l.extensions = map[string]map[int32]string{}
+	for _, r := range l.files {
+		fd := r.fd
+		prefix := fd.GetPackage()
+		// the file scope is the outermost resolution scope
+		scopes := []scope{fileScope(fd, l)}
+		if prefix != "" {
+			prefix += "."
+		}
+		if fd.Options != nil {
+			if err := l.resolveOptions(r, fd, "file", fd.GetName(), proto.MessageName(fd.Options), fd.Options.UninterpretedOption, scopes); err != nil {
+				return err
+			}
+		}
+		for _, md := range fd.MessageType {
+			if err := l.resolveMessageTypes(r, fd, prefix, md, scopes); err != nil {
+				return err
+			}
+		}
+		for _, fld := range fd.Extension {
+			if err := l.resolveFieldTypes(r, fd, prefix, fld, scopes); err != nil {
+				return err
+			}
+		}
+		for _, ed := range fd.EnumType {
+			if err := l.resolveEnumTypes(r, fd, prefix, ed, scopes); err != nil {
+				return err
+			}
+		}
+		for _, sd := range fd.Service {
+			if err := l.resolveServiceTypes(r, fd, prefix, sd, scopes); err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
+
+// resolveEnumTypes resolves option names used by an enum and by each of
+// its values. (Enums carry no type references of their own.)
+func (l *linker) resolveEnumTypes(r *parseResult, fd *dpb.FileDescriptorProto, prefix string, ed *dpb.EnumDescriptorProto, scopes []scope) error {
+	enumFqn := prefix + ed.GetName()
+	if ed.Options != nil {
+		if err := l.resolveOptions(r, fd, "enum", enumFqn, proto.MessageName(ed.Options), ed.Options.UninterpretedOption, scopes); err != nil {
+			return err
+		}
+	}
+	for _, evd := range ed.Value {
+		if evd.Options != nil {
+			evFqn := enumFqn + "." + evd.GetName()
+			if err := l.resolveOptions(r, fd, "enum value", evFqn, proto.MessageName(evd.Options), evd.Options.UninterpretedOption, scopes); err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
+
+// resolveMessageTypes resolves options and type references for a message
+// and, recursively, for its nested messages, enums, fields, extensions,
+// and extension ranges. A scope for this message is appended so that
+// references inside it are resolved relative to the message first.
+func (l *linker) resolveMessageTypes(r *parseResult, fd *dpb.FileDescriptorProto, prefix string, md *dpb.DescriptorProto, scopes []scope) error {
+	fqn := prefix + md.GetName()
+	scope := messageScope(fqn, isProto3(fd), l.descriptorPool[fd])
+	scopes = append(scopes, scope)
+	prefix = fqn + "."
+
+	if md.Options != nil {
+		if err := l.resolveOptions(r, fd, "message", fqn, proto.MessageName(md.Options), md.Options.UninterpretedOption, scopes); err != nil {
+			return err
+		}
+	}
+
+	for _, nmd := range md.NestedType {
+		if err := l.resolveMessageTypes(r, fd, prefix, nmd, scopes); err != nil {
+			return err
+		}
+	}
+	for _, ned := range md.EnumType {
+		if err := l.resolveEnumTypes(r, fd, prefix, ned, scopes); err != nil {
+			return err
+		}
+	}
+	for _, fld := range md.Field {
+		if err := l.resolveFieldTypes(r, fd, prefix, fld, scopes); err != nil {
+			return err
+		}
+	}
+	for _, fld := range md.Extension {
+		if err := l.resolveFieldTypes(r, fd, prefix, fld, scopes); err != nil {
+			return err
+		}
+	}
+	for _, er := range md.ExtensionRange {
+		if er.Options != nil {
+			// the range's end is exclusive in the proto, so display end-1
+			erName := fmt.Sprintf("%s:%d-%d", fqn, er.GetStart(), er.GetEnd()-1)
+			if err := l.resolveOptions(r, fd, "extension range", erName, proto.MessageName(er.Options), er.Options.UninterpretedOption, scopes); err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
+
+// resolveFieldTypes resolves a field's extendee (for extensions) and its
+// type name, rewriting both to fully-qualified ".pkg.Name" form. For
+// extensions it also validates that the tag lies within one of the
+// extended message's extension ranges and is not already used by another
+// extension. Fields parsed with a named type are tentatively typed as
+// messages; if the name resolves to an enum, the type is corrected here.
+func (l *linker) resolveFieldTypes(r *parseResult, fd *dpb.FileDescriptorProto, prefix string, fld *dpb.FieldDescriptorProto, scopes []scope) error {
+	thisName := prefix + fld.GetName()
+	scope := fmt.Sprintf("field %s", thisName)
+	node := r.getFieldNode(fld)
+	elemType := "field"
+	if fld.GetExtendee() != "" {
+		fqn, dsc, _ := l.resolve(fd, fld.GetExtendee(), isMessage, scopes)
+		if dsc == nil {
+			return ErrorWithSourcePos{Pos: node.fieldExtendee().start(), Underlying: fmt.Errorf("unknown extendee type %s", fld.GetExtendee())}
+		}
+		extd, ok := dsc.(*dpb.DescriptorProto)
+		if !ok {
+			otherType := descriptorType(dsc)
+			return ErrorWithSourcePos{Pos: node.fieldExtendee().start(), Underlying: fmt.Errorf("extendee is invalid: %s is a %s, not a message", fqn, otherType)}
+		}
+		fld.Extendee = proto.String("." + fqn)
+		// make sure the tag number is in range
+		found := false
+		tag := fld.GetNumber()
+		for _, rng := range extd.ExtensionRange {
+			if tag >= rng.GetStart() && tag < rng.GetEnd() {
+				found = true
+				break
+			}
+		}
+		if !found {
+			return ErrorWithSourcePos{Pos: node.fieldTag().start(), Underlying: fmt.Errorf("%s: tag %d is not in valid range for extended type %s", scope, tag, fqn)}
+		}
+		// make sure tag is not a duplicate
+		usedExtTags := l.extensions[fqn]
+		if usedExtTags == nil {
+			usedExtTags = map[int32]string{}
+			l.extensions[fqn] = usedExtTags
+		}
+		if other := usedExtTags[fld.GetNumber()]; other != "" {
+			return ErrorWithSourcePos{Pos: node.fieldTag().start(), Underlying: fmt.Errorf("%s: duplicate extension: %s and %s are both using tag %d", scope, other, thisName, fld.GetNumber())}
+		}
+		usedExtTags[fld.GetNumber()] = thisName
+		elemType = "extension"
+	}
+
+	if fld.Options != nil {
+		if err := l.resolveOptions(r, fd, elemType, thisName, proto.MessageName(fld.Options), fld.Options.UninterpretedOption, scopes); err != nil {
+			return err
+		}
+	}
+
+	if fld.GetTypeName() == "" {
+		// scalar type; no further resolution required
+		return nil
+	}
+
+	fqn, dsc, proto3 := l.resolve(fd, fld.GetTypeName(), isType, scopes)
+	if dsc == nil {
+		return ErrorWithSourcePos{Pos: node.fieldType().start(), Underlying: fmt.Errorf("%s: unknown type %s", scope, fld.GetTypeName())}
+	}
+	switch dsc := dsc.(type) {
+	case *dpb.DescriptorProto:
+		fld.TypeName = proto.String("." + fqn)
+	case *dpb.EnumDescriptorProto:
+		if fld.GetExtendee() == "" && isProto3(fd) && !proto3 {
+			// fields in a proto3 message cannot refer to proto2 enums
+			return ErrorWithSourcePos{Pos: node.fieldType().start(), Underlying: fmt.Errorf("%s: cannot use proto2 enum %s in a proto3 message", scope, fld.GetTypeName())}
+		}
+		fld.TypeName = proto.String("." + fqn)
+		// the type was tentatively set to message, but now we know it's actually an enum
+		fld.Type = dpb.FieldDescriptorProto_TYPE_ENUM.Enum()
+	default:
+		otherType := descriptorType(dsc)
+		return ErrorWithSourcePos{Pos: node.fieldType().start(), Underlying: fmt.Errorf("%s: invalid type: %s is a %s, not a message or enum", scope, fqn, otherType)}
+	}
+	return nil
+}
+
+// resolveServiceTypes resolves option names for the service and each of
+// its methods, and resolves every method's request and response types,
+// which must be messages; resolved names are rewritten to
+// fully-qualified form.
+func (l *linker) resolveServiceTypes(r *parseResult, fd *dpb.FileDescriptorProto, prefix string, sd *dpb.ServiceDescriptorProto, scopes []scope) error {
+	thisName := prefix + sd.GetName()
+	if sd.Options != nil {
+		if err := l.resolveOptions(r, fd, "service", thisName, proto.MessageName(sd.Options), sd.Options.UninterpretedOption, scopes); err != nil {
+			return err
+		}
+	}
+
+	for _, mtd := range sd.Method {
+		if mtd.Options != nil {
+			if err := l.resolveOptions(r, fd, "method", thisName+"."+mtd.GetName(), proto.MessageName(mtd.Options), mtd.Options.UninterpretedOption, scopes); err != nil {
+				return err
+			}
+		}
+		scope := fmt.Sprintf("method %s.%s", thisName, mtd.GetName())
+		node := r.getMethodNode(mtd)
+		// request type must resolve to a message
+		fqn, dsc, _ := l.resolve(fd, mtd.GetInputType(), isMessage, scopes)
+		if dsc == nil {
+			return ErrorWithSourcePos{Pos: node.getInputType().start(), Underlying: fmt.Errorf("%s: unknown request type %s", scope, mtd.GetInputType())}
+		}
+		if _, ok := dsc.(*dpb.DescriptorProto); !ok {
+			otherType := descriptorType(dsc)
+			return ErrorWithSourcePos{Pos: node.getInputType().start(), Underlying: fmt.Errorf("%s: invalid request type: %s is a %s, not a message", scope, fqn, otherType)}
+		}
+		mtd.InputType = proto.String("." + fqn)
+
+		// response type must also resolve to a message
+		fqn, dsc, _ = l.resolve(fd, mtd.GetOutputType(), isMessage, scopes)
+		if dsc == nil {
+			return ErrorWithSourcePos{Pos: node.getOutputType().start(), Underlying: fmt.Errorf("%s: unknown response type %s", scope, mtd.GetOutputType())}
+		}
+		if _, ok := dsc.(*dpb.DescriptorProto); !ok {
+			otherType := descriptorType(dsc)
+			return ErrorWithSourcePos{Pos: node.getOutputType().start(), Underlying: fmt.Errorf("%s: invalid response type: %s is a %s, not a message", scope, fqn, otherType)}
+		}
+		mtd.OutputType = proto.String("." + fqn)
+	}
+	return nil
+}
+
// resolveOptions resolves the extension name parts of the given uninterpreted
// options, rewriting each extension reference into fully-qualified form
// (prefixed with a dot). Non-extension name parts need no resolution.
// elemType and elemName are used only to build error messages; file-level
// options get no scope prefix.
func (l *linker) resolveOptions(r *parseResult, fd *dpb.FileDescriptorProto, elemType, elemName, optType string, opts []*dpb.UninterpretedOption, scopes []scope) error {
	var scope string
	if elemType != "file" {
		scope = fmt.Sprintf("%s %s: ", elemType, elemName)
	}
	for _, opt := range opts {
		for _, nm := range opt.Name {
			if nm.GetIsExtension() {
				node := r.getOptionNamePartNode(nm)
				// An extension option name must resolve to a field (isField filter)
				// that actually has an extendee.
				fqn, dsc, _ := l.resolve(fd, nm.GetNamePart(), isField, scopes)
				if dsc == nil {
					return ErrorWithSourcePos{Pos: node.start(), Underlying: fmt.Errorf("%sunknown extension %s", scope, nm.GetNamePart())}
				}
				if ext, ok := dsc.(*dpb.FieldDescriptorProto); !ok {
					otherType := descriptorType(dsc)
					return ErrorWithSourcePos{Pos: node.start(), Underlying: fmt.Errorf("%sinvalid extension: %s is a %s, not an extension", scope, nm.GetNamePart(), otherType)}
				} else if ext.GetExtendee() == "" {
					return ErrorWithSourcePos{Pos: node.start(), Underlying: fmt.Errorf("%sinvalid extension: %s is a field but not an extension", scope, nm.GetNamePart())}
				}
				nm.NamePart = proto.String("." + fqn)
			}
		}
	}
	return nil
}
+
// resolve looks up name (which may be relative or fully-qualified with a
// leading dot) and returns the fully-qualified name, the descriptor proto
// found, and whether the declaring file uses proto3 syntax. Relative names
// are searched from the innermost (last) scope outward. If only a symbol of
// a disallowed kind (per the allowed predicate) is found, that "best guess"
// is still returned so the caller can produce a more precise error message.
func (l *linker) resolve(fd *dpb.FileDescriptorProto, name string, allowed func(proto.Message) bool, scopes []scope) (fqn string, element proto.Message, proto3 bool) {
	if strings.HasPrefix(name, ".") {
		// already fully-qualified
		d, proto3 := l.findSymbol(fd, name[1:], false, map[*dpb.FileDescriptorProto]struct{}{})
		if d != nil {
			return name[1:], d, proto3
		}
	} else {
		// unqualified, so we look in the enclosing (last) scope first and move
		// towards outermost (first) scope, trying to resolve the symbol
		var bestGuess proto.Message
		var bestGuessFqn string
		var bestGuessProto3 bool
		for i := len(scopes) - 1; i >= 0; i-- {
			fqn, d, proto3 := scopes[i](name)
			if d != nil {
				if allowed(d) {
					return fqn, d, proto3
				} else if bestGuess == nil {
					// remember only the first (innermost) wrong-kind match
					bestGuess = d
					bestGuessFqn = fqn
					bestGuessProto3 = proto3
				}
			}
		}
		// we return best guess, even though it was not an allowed kind of
		// descriptor, so caller can print a better error message (e.g.
		// indicating that the name was found but that it's the wrong type)
		return bestGuessFqn, bestGuess, bestGuessProto3
	}
	return "", nil, false
}
+
+func isField(m proto.Message) bool {
+	_, ok := m.(*dpb.FieldDescriptorProto)
+	return ok
+}
+
+func isMessage(m proto.Message) bool {
+	_, ok := m.(*dpb.DescriptorProto)
+	return ok
+}
+
+func isType(m proto.Message) bool {
+	switch m.(type) {
+	case *dpb.DescriptorProto, *dpb.EnumDescriptorProto:
+		return true
+	}
+	return false
+}
+
// scope represents a lexical scope in a proto file in which messages and enums
// can be declared. Given a (possibly partially-qualified) symbol name it
// returns the fully-qualified name, the descriptor proto found, and whether
// the declaring file uses proto3 syntax; all results are zero values when the
// symbol is not found in this scope.
type scope func(symbol string) (fqn string, element proto.Message, proto3 bool)
+
+func fileScope(fd *dpb.FileDescriptorProto, l *linker) scope {
+	// we search symbols in this file, but also symbols in other files that have
+	// the same package as this file or a "parent" package (in protobuf,
+	// packages are a hierarchy like C++ namespaces)
+	prefixes := internal.CreatePrefixList(fd.GetPackage())
+	return func(name string) (string, proto.Message, bool) {
+		for _, prefix := range prefixes {
+			var n string
+			if prefix == "" {
+				n = name
+			} else {
+				n = prefix + "." + name
+			}
+			d, proto3 := l.findSymbol(fd, n, false, map[*dpb.FileDescriptorProto]struct{}{})
+			if d != nil {
+				return n, d, proto3
+			}
+		}
+		return "", nil, false
+	}
+}
+
+func messageScope(messageName string, proto3 bool, filePool map[string]proto.Message) scope {
+	return func(name string) (string, proto.Message, bool) {
+		n := messageName + "." + name
+		if d, ok := filePool[n]; ok {
+			return n, d, proto3
+		}
+		return "", nil, false
+	}
+}
+
+func (l *linker) findSymbol(fd *dpb.FileDescriptorProto, name string, public bool, checked map[*dpb.FileDescriptorProto]struct{}) (element proto.Message, proto3 bool) {
+	if _, ok := checked[fd]; ok {
+		// already checked this one
+		return nil, false
+	}
+	checked[fd] = struct{}{}
+	d := l.descriptorPool[fd][name]
+	if d != nil {
+		return d, isProto3(fd)
+	}
+
+	// When public = false, we are searching only directly imported symbols. But we
+	// also need to search transitive public imports due to semantics of public imports.
+	if public {
+		for _, depIndex := range fd.PublicDependency {
+			dep := fd.Dependency[depIndex]
+			depres := l.files[dep]
+			if depres == nil {
+				// we'll catch this error later
+				continue
+			}
+			if d, proto3 := l.findSymbol(depres.fd, name, true, checked); d != nil {
+				return d, proto3
+			}
+		}
+	} else {
+		for _, dep := range fd.Dependency {
+			depres := l.files[dep]
+			if depres == nil {
+				// we'll catch this error later
+				continue
+			}
+			if d, proto3 := l.findSymbol(depres.fd, name, true, checked); d != nil {
+				return d, proto3
+			}
+		}
+	}
+
+	return nil, false
+}
+
+func isProto3(fd *dpb.FileDescriptorProto) bool {
+	return fd.GetSyntax() == "proto3"
+}
+
+func (l *linker) createdLinkedDescriptors() (map[string]*desc.FileDescriptor, error) {
+	names := make([]string, 0, len(l.files))
+	for name := range l.files {
+		names = append(names, name)
+	}
+	sort.Strings(names)
+	linked := map[string]*desc.FileDescriptor{}
+	for _, name := range names {
+		if _, err := l.linkFile(name, nil, linked); err != nil {
+			return nil, err
+		}
+	}
+	return linked, nil
+}
+
// linkFile links the named file into a rich *desc.FileDescriptor, first
// recursively linking its dependencies. seen is the chain of files currently
// being linked (used to detect import cycles); linked caches completed
// results so each file is linked at most once.
func (l *linker) linkFile(name string, seen []string, linked map[string]*desc.FileDescriptor) (*desc.FileDescriptor, error) {
	// check for import cycle
	for _, s := range seen {
		if name == s {
			// Build a readable chain like "a.proto" -> "b.proto" -> "a.proto".
			var msg bytes.Buffer
			first := true
			for _, s := range seen {
				if first {
					first = false
				} else {
					msg.WriteString(" -> ")
				}
				fmt.Fprintf(&msg, "%q", s)
			}
			fmt.Fprintf(&msg, " -> %q", name)
			return nil, fmt.Errorf("cycle found in imports: %s", msg.String())
		}
	}
	seen = append(seen, name)

	if lfd, ok := linked[name]; ok {
		// already linked
		return lfd, nil
	}
	r := l.files[name]
	if r == nil {
		importer := seen[len(seen)-2] // len-1 is *this* file, before that is the one that imported it
		return nil, fmt.Errorf("no descriptor found for %q, imported by %q", name, importer)
	}
	// Link dependencies first so they can be handed to CreateFileDescriptor.
	var deps []*desc.FileDescriptor
	for _, dep := range r.fd.Dependency {
		ldep, err := l.linkFile(dep, seen, linked)
		if err != nil {
			return nil, err
		}
		deps = append(deps, ldep)
	}
	lfd, err := desc.CreateFileDescriptor(r.fd, deps...)
	if err != nil {
		return nil, fmt.Errorf("error linking %q: %s", name, err)
	}
	linked[name] = lfd
	return lfd, nil
}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/options.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/options.go
new file mode 100644
index 0000000..be287f6
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/options.go
@@ -0,0 +1,1405 @@
+package protoparse
+
+import (
+	"bytes"
+	"fmt"
+	"math"
+
+	"github.com/golang/protobuf/proto"
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+
+	"github.com/jhump/protoreflect/desc"
+	"github.com/jhump/protoreflect/desc/internal"
+	"github.com/jhump/protoreflect/dynamic"
+)
+
// NB: To process options, we need descriptors, but we may not have rich
// descriptors when trying to interpret options for unlinked parsed files.
// So we define minimal interfaces that can be backed by both rich descriptors
// as well as their poorer cousins, plain ol' descriptor protos.

// descriptorish is the common surface shared by every element kind.
type descriptorish interface {
	GetFile() fileDescriptorish
	GetFullyQualifiedName() string
	AsProto() proto.Message
}

// fileDescriptorish abstracts a file and access to everything declared in it.
type fileDescriptorish interface {
	descriptorish
	GetFileOptions() *dpb.FileOptions
	GetPackage() string
	FindSymbol(name string) desc.Descriptor
	GetPublicDependencies() []fileDescriptorish
	GetDependencies() []fileDescriptorish
	GetMessageTypes() []msgDescriptorish
	GetExtensions() []fldDescriptorish
	GetEnumTypes() []enumDescriptorish
	GetServices() []svcDescriptorish
}

// msgDescriptorish abstracts a message and its nested elements.
type msgDescriptorish interface {
	descriptorish
	GetMessageOptions() *dpb.MessageOptions
	GetFields() []fldDescriptorish
	GetOneOfs() []oneofDescriptorish
	GetExtensionRanges() []extRangeDescriptorish
	GetNestedMessageTypes() []msgDescriptorish
	GetNestedExtensions() []fldDescriptorish
	GetNestedEnumTypes() []enumDescriptorish
}

// fldDescriptorish abstracts a field or extension.
type fldDescriptorish interface {
	descriptorish
	GetFieldOptions() *dpb.FieldOptions
	GetMessageType() *desc.MessageDescriptor
	GetEnumType() *desc.EnumDescriptor
	AsFieldDescriptorProto() *dpb.FieldDescriptorProto
}

// oneofDescriptorish abstracts a oneof declaration.
type oneofDescriptorish interface {
	descriptorish
	GetOneOfOptions() *dpb.OneofOptions
}

// enumDescriptorish abstracts an enum and its values.
type enumDescriptorish interface {
	descriptorish
	GetEnumOptions() *dpb.EnumOptions
	GetValues() []enumValDescriptorish
}

// enumValDescriptorish abstracts a single enum value.
type enumValDescriptorish interface {
	descriptorish
	GetEnumValueOptions() *dpb.EnumValueOptions
}

// svcDescriptorish abstracts a service and its methods.
type svcDescriptorish interface {
	descriptorish
	GetServiceOptions() *dpb.ServiceOptions
	GetMethods() []methodDescriptorish
}

// methodDescriptorish abstracts a single RPC method.
type methodDescriptorish interface {
	descriptorish
	GetMethodOptions() *dpb.MethodOptions
}
+
// The hierarchy of descriptorish implementations backed by
// rich descriptors:
//
// Each rich adapter embeds the corresponding *desc.* descriptor so most of
// the interface is satisfied by the embedded type; the methods below exist
// only to wrap returned children in their own *ish adapters.

type richFileDescriptorish struct {
	*desc.FileDescriptor
}

func (d richFileDescriptorish) GetFile() fileDescriptorish {
	return d
}

func (d richFileDescriptorish) GetPublicDependencies() []fileDescriptorish {
	deps := d.FileDescriptor.GetPublicDependencies()
	ret := make([]fileDescriptorish, len(deps))
	for i, d := range deps {
		ret[i] = richFileDescriptorish{FileDescriptor: d}
	}
	return ret
}

func (d richFileDescriptorish) GetDependencies() []fileDescriptorish {
	deps := d.FileDescriptor.GetDependencies()
	ret := make([]fileDescriptorish, len(deps))
	for i, d := range deps {
		ret[i] = richFileDescriptorish{FileDescriptor: d}
	}
	return ret
}

func (d richFileDescriptorish) GetMessageTypes() []msgDescriptorish {
	msgs := d.FileDescriptor.GetMessageTypes()
	ret := make([]msgDescriptorish, len(msgs))
	for i, m := range msgs {
		ret[i] = richMsgDescriptorish{MessageDescriptor: m}
	}
	return ret
}

func (d richFileDescriptorish) GetExtensions() []fldDescriptorish {
	flds := d.FileDescriptor.GetExtensions()
	ret := make([]fldDescriptorish, len(flds))
	for i, f := range flds {
		ret[i] = richFldDescriptorish{FieldDescriptor: f}
	}
	return ret
}

func (d richFileDescriptorish) GetEnumTypes() []enumDescriptorish {
	ens := d.FileDescriptor.GetEnumTypes()
	ret := make([]enumDescriptorish, len(ens))
	for i, en := range ens {
		ret[i] = richEnumDescriptorish{EnumDescriptor: en}
	}
	return ret
}

func (d richFileDescriptorish) GetServices() []svcDescriptorish {
	svcs := d.FileDescriptor.GetServices()
	ret := make([]svcDescriptorish, len(svcs))
	for i, s := range svcs {
		ret[i] = richSvcDescriptorish{ServiceDescriptor: s}
	}
	return ret
}

type richMsgDescriptorish struct {
	*desc.MessageDescriptor
}

func (d richMsgDescriptorish) GetFile() fileDescriptorish {
	return richFileDescriptorish{FileDescriptor: d.MessageDescriptor.GetFile()}
}

func (d richMsgDescriptorish) GetFields() []fldDescriptorish {
	flds := d.MessageDescriptor.GetFields()
	ret := make([]fldDescriptorish, len(flds))
	for i, f := range flds {
		ret[i] = richFldDescriptorish{FieldDescriptor: f}
	}
	return ret
}

func (d richMsgDescriptorish) GetOneOfs() []oneofDescriptorish {
	oos := d.MessageDescriptor.GetOneOfs()
	ret := make([]oneofDescriptorish, len(oos))
	for i, oo := range oos {
		ret[i] = richOneOfDescriptorish{OneOfDescriptor: oo}
	}
	return ret
}

// GetExtensionRanges adapts the raw extension ranges from the underlying
// descriptor proto; there is no rich type for ranges, so the shared
// extRangeDescriptorish is used for both rich and poor hierarchies.
func (d richMsgDescriptorish) GetExtensionRanges() []extRangeDescriptorish {
	md := d.MessageDescriptor
	mdFqn := md.GetFullyQualifiedName()
	extrs := md.AsDescriptorProto().GetExtensionRange()
	ret := make([]extRangeDescriptorish, len(extrs))
	for i, extr := range extrs {
		ret[i] = extRangeDescriptorish{
			er:   extr,
			qual: mdFqn,
			file: richFileDescriptorish{FileDescriptor: md.GetFile()},
		}
	}
	return ret
}

func (d richMsgDescriptorish) GetNestedMessageTypes() []msgDescriptorish {
	msgs := d.MessageDescriptor.GetNestedMessageTypes()
	ret := make([]msgDescriptorish, len(msgs))
	for i, m := range msgs {
		ret[i] = richMsgDescriptorish{MessageDescriptor: m}
	}
	return ret
}

func (d richMsgDescriptorish) GetNestedExtensions() []fldDescriptorish {
	flds := d.MessageDescriptor.GetNestedExtensions()
	ret := make([]fldDescriptorish, len(flds))
	for i, f := range flds {
		ret[i] = richFldDescriptorish{FieldDescriptor: f}
	}
	return ret
}

func (d richMsgDescriptorish) GetNestedEnumTypes() []enumDescriptorish {
	ens := d.MessageDescriptor.GetNestedEnumTypes()
	ret := make([]enumDescriptorish, len(ens))
	for i, en := range ens {
		ret[i] = richEnumDescriptorish{EnumDescriptor: en}
	}
	return ret
}

type richFldDescriptorish struct {
	*desc.FieldDescriptor
}

func (d richFldDescriptorish) GetFile() fileDescriptorish {
	return richFileDescriptorish{FileDescriptor: d.FieldDescriptor.GetFile()}
}

func (d richFldDescriptorish) AsFieldDescriptorProto() *dpb.FieldDescriptorProto {
	return d.FieldDescriptor.AsFieldDescriptorProto()
}

type richOneOfDescriptorish struct {
	*desc.OneOfDescriptor
}

func (d richOneOfDescriptorish) GetFile() fileDescriptorish {
	return richFileDescriptorish{FileDescriptor: d.OneOfDescriptor.GetFile()}
}

type richEnumDescriptorish struct {
	*desc.EnumDescriptor
}

func (d richEnumDescriptorish) GetFile() fileDescriptorish {
	return richFileDescriptorish{FileDescriptor: d.EnumDescriptor.GetFile()}
}

func (d richEnumDescriptorish) GetValues() []enumValDescriptorish {
	vals := d.EnumDescriptor.GetValues()
	ret := make([]enumValDescriptorish, len(vals))
	for i, val := range vals {
		ret[i] = richEnumValDescriptorish{EnumValueDescriptor: val}
	}
	return ret
}

type richEnumValDescriptorish struct {
	*desc.EnumValueDescriptor
}

func (d richEnumValDescriptorish) GetFile() fileDescriptorish {
	return richFileDescriptorish{FileDescriptor: d.EnumValueDescriptor.GetFile()}
}

type richSvcDescriptorish struct {
	*desc.ServiceDescriptor
}

func (d richSvcDescriptorish) GetFile() fileDescriptorish {
	return richFileDescriptorish{FileDescriptor: d.ServiceDescriptor.GetFile()}
}

func (d richSvcDescriptorish) GetMethods() []methodDescriptorish {
	mtds := d.ServiceDescriptor.GetMethods()
	ret := make([]methodDescriptorish, len(mtds))
	for i, mtd := range mtds {
		ret[i] = richMethodDescriptorish{MethodDescriptor: mtd}
	}
	return ret
}

type richMethodDescriptorish struct {
	*desc.MethodDescriptor
}

func (d richMethodDescriptorish) GetFile() fileDescriptorish {
	return richFileDescriptorish{FileDescriptor: d.MethodDescriptor.GetFile()}
}
+
// The hierarchy of descriptorish implementations backed by
// plain descriptor protos:
//
// Each poor adapter embeds the raw descriptor proto plus the qualifier (the
// fully-qualified name of its parent) and the file it belongs to, since the
// raw protos do not carry that context themselves.

type poorFileDescriptorish struct {
	*dpb.FileDescriptorProto
}

func (d poorFileDescriptorish) GetFile() fileDescriptorish {
	return d
}

func (d poorFileDescriptorish) GetFullyQualifiedName() string {
	return d.FileDescriptorProto.GetName()
}

func (d poorFileDescriptorish) AsProto() proto.Message {
	return d.FileDescriptorProto
}

func (d poorFileDescriptorish) GetFileOptions() *dpb.FileOptions {
	return d.FileDescriptorProto.GetOptions()
}

// FindSymbol always fails for unlinked files: without linking there is no
// symbol table to consult.
func (d poorFileDescriptorish) FindSymbol(name string) desc.Descriptor {
	return nil
}

func (d poorFileDescriptorish) GetPublicDependencies() []fileDescriptorish {
	return nil
}

func (d poorFileDescriptorish) GetDependencies() []fileDescriptorish {
	return nil
}

func (d poorFileDescriptorish) GetMessageTypes() []msgDescriptorish {
	msgs := d.FileDescriptorProto.GetMessageType()
	pkg := d.FileDescriptorProto.GetPackage()
	ret := make([]msgDescriptorish, len(msgs))
	for i, m := range msgs {
		ret[i] = poorMsgDescriptorish{
			DescriptorProto: m,
			qual:            pkg,
			file:            d,
		}
	}
	return ret
}

func (d poorFileDescriptorish) GetExtensions() []fldDescriptorish {
	exts := d.FileDescriptorProto.GetExtension()
	pkg := d.FileDescriptorProto.GetPackage()
	ret := make([]fldDescriptorish, len(exts))
	for i, e := range exts {
		ret[i] = poorFldDescriptorish{
			FieldDescriptorProto: e,
			qual:                 pkg,
			file:                 d,
		}
	}
	return ret
}

func (d poorFileDescriptorish) GetEnumTypes() []enumDescriptorish {
	ens := d.FileDescriptorProto.GetEnumType()
	pkg := d.FileDescriptorProto.GetPackage()
	ret := make([]enumDescriptorish, len(ens))
	for i, e := range ens {
		ret[i] = poorEnumDescriptorish{
			EnumDescriptorProto: e,
			qual:                pkg,
			file:                d,
		}
	}
	return ret
}

func (d poorFileDescriptorish) GetServices() []svcDescriptorish {
	svcs := d.FileDescriptorProto.GetService()
	pkg := d.FileDescriptorProto.GetPackage()
	ret := make([]svcDescriptorish, len(svcs))
	for i, s := range svcs {
		ret[i] = poorSvcDescriptorish{
			ServiceDescriptorProto: s,
			qual:                   pkg,
			file:                   d,
		}
	}
	return ret
}

type poorMsgDescriptorish struct {
	*dpb.DescriptorProto
	qual string // fully-qualified name of the enclosing scope (package or message)
	file fileDescriptorish
}

func (d poorMsgDescriptorish) GetFile() fileDescriptorish {
	return d.file
}

func (d poorMsgDescriptorish) GetFullyQualifiedName() string {
	return qualify(d.qual, d.DescriptorProto.GetName())
}
+
// qualify joins a qualifier and a simple name with a dot, returning the name
// unchanged when the qualifier is empty.
func qualify(qual, name string) string {
	if qual == "" {
		return name
	}
	return qual + "." + name
}
+
func (d poorMsgDescriptorish) AsProto() proto.Message {
	return d.DescriptorProto
}

func (d poorMsgDescriptorish) GetMessageOptions() *dpb.MessageOptions {
	return d.DescriptorProto.GetOptions()
}

// Children are qualified with this message's fully-qualified name so their
// own GetFullyQualifiedName results are correct.
func (d poorMsgDescriptorish) GetFields() []fldDescriptorish {
	flds := d.DescriptorProto.GetField()
	ret := make([]fldDescriptorish, len(flds))
	for i, f := range flds {
		ret[i] = poorFldDescriptorish{
			FieldDescriptorProto: f,
			qual:                 d.GetFullyQualifiedName(),
			file:                 d.file,
		}
	}
	return ret
}

func (d poorMsgDescriptorish) GetOneOfs() []oneofDescriptorish {
	oos := d.DescriptorProto.GetOneofDecl()
	ret := make([]oneofDescriptorish, len(oos))
	for i, oo := range oos {
		ret[i] = poorOneOfDescriptorish{
			OneofDescriptorProto: oo,
			qual:                 d.GetFullyQualifiedName(),
			file:                 d.file,
		}
	}
	return ret
}

func (d poorMsgDescriptorish) GetExtensionRanges() []extRangeDescriptorish {
	mdFqn := d.GetFullyQualifiedName()
	extrs := d.DescriptorProto.GetExtensionRange()
	ret := make([]extRangeDescriptorish, len(extrs))
	for i, extr := range extrs {
		ret[i] = extRangeDescriptorish{
			er:   extr,
			qual: mdFqn,
			file: d.file,
		}
	}
	return ret
}

func (d poorMsgDescriptorish) GetNestedMessageTypes() []msgDescriptorish {
	msgs := d.DescriptorProto.GetNestedType()
	ret := make([]msgDescriptorish, len(msgs))
	for i, m := range msgs {
		ret[i] = poorMsgDescriptorish{
			DescriptorProto: m,
			qual:            d.GetFullyQualifiedName(),
			file:            d.file,
		}
	}
	return ret
}

func (d poorMsgDescriptorish) GetNestedExtensions() []fldDescriptorish {
	flds := d.DescriptorProto.GetExtension()
	ret := make([]fldDescriptorish, len(flds))
	for i, f := range flds {
		ret[i] = poorFldDescriptorish{
			FieldDescriptorProto: f,
			qual:                 d.GetFullyQualifiedName(),
			file:                 d.file,
		}
	}
	return ret
}

func (d poorMsgDescriptorish) GetNestedEnumTypes() []enumDescriptorish {
	ens := d.DescriptorProto.GetEnumType()
	ret := make([]enumDescriptorish, len(ens))
	for i, en := range ens {
		ret[i] = poorEnumDescriptorish{
			EnumDescriptorProto: en,
			qual:                d.GetFullyQualifiedName(),
			file:                d.file,
		}
	}
	return ret
}
+
type poorFldDescriptorish struct {
	*dpb.FieldDescriptorProto
	qual string // fully-qualified name of the enclosing scope (package or message)
	file fileDescriptorish
}

func (d poorFldDescriptorish) GetFile() fileDescriptorish {
	return d.file
}

func (d poorFldDescriptorish) GetFullyQualifiedName() string {
	return qualify(d.qual, d.FieldDescriptorProto.GetName())
}

func (d poorFldDescriptorish) AsProto() proto.Message {
	return d.FieldDescriptorProto
}

func (d poorFldDescriptorish) GetFieldOptions() *dpb.FieldOptions {
	return d.FieldDescriptorProto.GetOptions()
}

// GetMessageType/GetEnumType return nil: without linking, the field's type
// reference cannot be resolved to a rich descriptor.
func (d poorFldDescriptorish) GetMessageType() *desc.MessageDescriptor {
	return nil
}

func (d poorFldDescriptorish) GetEnumType() *desc.EnumDescriptor {
	return nil
}
+
type poorOneOfDescriptorish struct {
	*dpb.OneofDescriptorProto
	qual string // fully-qualified name of the enclosing message
	file fileDescriptorish
}

func (d poorOneOfDescriptorish) GetFile() fileDescriptorish {
	return d.file
}

func (d poorOneOfDescriptorish) GetFullyQualifiedName() string {
	return qualify(d.qual, d.OneofDescriptorProto.GetName())
}

func (d poorOneOfDescriptorish) AsProto() proto.Message {
	return d.OneofDescriptorProto
}

func (d poorOneOfDescriptorish) GetOneOfOptions() *dpb.OneofOptions {
	return d.OneofDescriptorProto.GetOptions()
}
+
// AsFieldDescriptorProto completes poorFldDescriptorish's fldDescriptorish
// implementation. (NOTE: declared here, after poorOneOfDescriptorish, rather
// than with the rest of poorFldDescriptorish's methods above.)
func (d poorFldDescriptorish) AsFieldDescriptorProto() *dpb.FieldDescriptorProto {
	return d.FieldDescriptorProto
}
+
type poorEnumDescriptorish struct {
	*dpb.EnumDescriptorProto
	qual string // fully-qualified name of the enclosing scope (package or message)
	file fileDescriptorish
}

func (d poorEnumDescriptorish) GetFile() fileDescriptorish {
	return d.file
}

func (d poorEnumDescriptorish) GetFullyQualifiedName() string {
	return qualify(d.qual, d.EnumDescriptorProto.GetName())
}

func (d poorEnumDescriptorish) AsProto() proto.Message {
	return d.EnumDescriptorProto
}

func (d poorEnumDescriptorish) GetEnumOptions() *dpb.EnumOptions {
	return d.EnumDescriptorProto.GetOptions()
}

func (d poorEnumDescriptorish) GetValues() []enumValDescriptorish {
	vals := d.EnumDescriptorProto.GetValue()
	ret := make([]enumValDescriptorish, len(vals))
	for i, v := range vals {
		ret[i] = poorEnumValDescriptorish{
			EnumValueDescriptorProto: v,
			qual:                     d.GetFullyQualifiedName(),
			file:                     d.file,
		}
	}
	return ret
}
+
type poorEnumValDescriptorish struct {
	*dpb.EnumValueDescriptorProto
	qual string // fully-qualified name of the enclosing enum
	file fileDescriptorish
}

func (d poorEnumValDescriptorish) GetFile() fileDescriptorish {
	return d.file
}

func (d poorEnumValDescriptorish) GetFullyQualifiedName() string {
	return qualify(d.qual, d.EnumValueDescriptorProto.GetName())
}

func (d poorEnumValDescriptorish) AsProto() proto.Message {
	return d.EnumValueDescriptorProto
}

func (d poorEnumValDescriptorish) GetEnumValueOptions() *dpb.EnumValueOptions {
	return d.EnumValueDescriptorProto.GetOptions()
}
+
type poorSvcDescriptorish struct {
	*dpb.ServiceDescriptorProto
	qual string // package name of the enclosing file
	file fileDescriptorish
}

func (d poorSvcDescriptorish) GetFile() fileDescriptorish {
	return d.file
}

func (d poorSvcDescriptorish) GetFullyQualifiedName() string {
	return qualify(d.qual, d.ServiceDescriptorProto.GetName())
}

func (d poorSvcDescriptorish) AsProto() proto.Message {
	return d.ServiceDescriptorProto
}

func (d poorSvcDescriptorish) GetServiceOptions() *dpb.ServiceOptions {
	return d.ServiceDescriptorProto.GetOptions()
}

func (d poorSvcDescriptorish) GetMethods() []methodDescriptorish {
	mtds := d.ServiceDescriptorProto.GetMethod()
	ret := make([]methodDescriptorish, len(mtds))
	for i, m := range mtds {
		ret[i] = poorMethodDescriptorish{
			MethodDescriptorProto: m,
			qual:                  d.GetFullyQualifiedName(),
			file:                  d.file,
		}
	}
	return ret
}
+
type poorMethodDescriptorish struct {
	*dpb.MethodDescriptorProto
	qual string // fully-qualified name of the enclosing service
	file fileDescriptorish
}

func (d poorMethodDescriptorish) GetFile() fileDescriptorish {
	return d.file
}

func (d poorMethodDescriptorish) GetFullyQualifiedName() string {
	return qualify(d.qual, d.MethodDescriptorProto.GetName())
}

func (d poorMethodDescriptorish) AsProto() proto.Message {
	return d.MethodDescriptorProto
}

func (d poorMethodDescriptorish) GetMethodOptions() *dpb.MethodOptions {
	return d.MethodDescriptorProto.GetOptions()
}
+
// extRangeDescriptorish adapts an extension range; it is shared by both the
// rich and poor hierarchies since ranges have no rich descriptor type.
type extRangeDescriptorish struct {
	er   *dpb.DescriptorProto_ExtensionRange
	qual string // fully-qualified name of the enclosing message
	file fileDescriptorish
}

func (er extRangeDescriptorish) GetFile() fileDescriptorish {
	return er.file
}

// GetFullyQualifiedName renders the range as "<msg>.<start>-<end>" where end
// is inclusive (the proto's End field is exclusive, hence the -1).
func (er extRangeDescriptorish) GetFullyQualifiedName() string {
	return qualify(er.qual, fmt.Sprintf("%d-%d", er.er.GetStart(), er.er.GetEnd()-1))
}

func (er extRangeDescriptorish) AsProto() proto.Message {
	return er.er
}

func (er extRangeDescriptorish) GetExtensionRangeOptions() *dpb.ExtensionRangeOptions {
	return er.er.GetOptions()
}
+
// interpretFileOptions interprets all uninterpreted options in the file:
// file-level options plus those of every message, extension, enum, service,
// and method it contains. Options that remain uninterpretable (when parsing
// leniently) are kept in the UninterpretedOption list returned by
// interpretOptions.
func interpretFileOptions(r *parseResult, fd fileDescriptorish) error {
	opts := fd.GetFileOptions()
	if opts != nil {
		if len(opts.UninterpretedOption) > 0 {
			if remain, err := interpretOptions(r, fd, opts, opts.UninterpretedOption); err != nil {
				return err
			} else {
				opts.UninterpretedOption = remain
			}
		}
	}
	for _, md := range fd.GetMessageTypes() {
		if err := interpretMessageOptions(r, md); err != nil {
			return err
		}
	}
	for _, fld := range fd.GetExtensions() {
		if err := interpretFieldOptions(r, fld); err != nil {
			return err
		}
	}
	for _, ed := range fd.GetEnumTypes() {
		if err := interpretEnumOptions(r, ed); err != nil {
			return err
		}
	}
	for _, sd := range fd.GetServices() {
		opts := sd.GetServiceOptions()
		if opts != nil {
			if len(opts.UninterpretedOption) > 0 {
				if remain, err := interpretOptions(r, sd, opts, opts.UninterpretedOption); err != nil {
					return err
				} else {
					opts.UninterpretedOption = remain
				}
			}
		}
		for _, mtd := range sd.GetMethods() {
			opts := mtd.GetMethodOptions()
			if opts != nil {
				if len(opts.UninterpretedOption) > 0 {
					if remain, err := interpretOptions(r, mtd, opts, opts.UninterpretedOption); err != nil {
						return err
					} else {
						opts.UninterpretedOption = remain
					}
				}
			}
		}
	}
	return nil
}
+
// interpretMessageOptions interprets the message's own options and then
// recurses into its fields, oneofs, nested extensions, extension ranges,
// nested messages, and nested enums.
func interpretMessageOptions(r *parseResult, md msgDescriptorish) error {
	opts := md.GetMessageOptions()
	if opts != nil {
		if len(opts.UninterpretedOption) > 0 {
			if remain, err := interpretOptions(r, md, opts, opts.UninterpretedOption); err != nil {
				return err
			} else {
				opts.UninterpretedOption = remain
			}
		}
	}
	for _, fld := range md.GetFields() {
		if err := interpretFieldOptions(r, fld); err != nil {
			return err
		}
	}
	for _, ood := range md.GetOneOfs() {
		opts := ood.GetOneOfOptions()
		if opts != nil {
			if len(opts.UninterpretedOption) > 0 {
				if remain, err := interpretOptions(r, ood, opts, opts.UninterpretedOption); err != nil {
					return err
				} else {
					opts.UninterpretedOption = remain
				}
			}
		}
	}
	for _, fld := range md.GetNestedExtensions() {
		if err := interpretFieldOptions(r, fld); err != nil {
			return err
		}
	}
	for _, er := range md.GetExtensionRanges() {
		opts := er.GetExtensionRangeOptions()
		if opts != nil {
			if len(opts.UninterpretedOption) > 0 {
				if remain, err := interpretOptions(r, er, opts, opts.UninterpretedOption); err != nil {
					return err
				} else {
					opts.UninterpretedOption = remain
				}
			}
		}
	}
	for _, nmd := range md.GetNestedMessageTypes() {
		if err := interpretMessageOptions(r, nmd); err != nil {
			return err
		}
	}
	for _, ed := range md.GetNestedEnumTypes() {
		if err := interpretEnumOptions(r, ed); err != nil {
			return err
		}
	}
	return nil
}
+
// interpretFieldOptions interprets a field's options. The "json_name" and
// "default" pseudo-options are handled specially here (they map to descriptor
// fields, not to real options) and are removed from the uninterpreted list
// before the remaining options are interpreted normally. In lenient mode
// (r.lenient), pseudo-option errors are swallowed and the option is left in
// place.
func interpretFieldOptions(r *parseResult, fld fldDescriptorish) error {
	opts := fld.GetFieldOptions()
	if opts != nil {
		if len(opts.UninterpretedOption) > 0 {
			uo := opts.UninterpretedOption
			scope := fmt.Sprintf("field %s", fld.GetFullyQualifiedName())

			// process json_name pseudo-option
			if index, err := findOption(r, scope, uo, "json_name"); err != nil && !r.lenient {
				return err
			} else if err == nil && index >= 0 {
				opt := uo[index]
				optNode := r.getOptionNode(opt)

				// attribute source code info
				if on, ok := optNode.(*optionNode); ok {
					r.interpretedOptions[on] = []int32{-1, internal.Field_jsonNameTag}
				}
				uo = removeOption(uo, index)
				if opt.StringValue == nil {
					return ErrorWithSourcePos{Pos: optNode.getValue().start(), Underlying: fmt.Errorf("%s: expecting string value for json_name option", scope)}
				}
				fld.AsFieldDescriptorProto().JsonName = proto.String(string(opt.StringValue))
			}

			// and process default pseudo-option
			if index, err := processDefaultOption(r, scope, fld, uo); err != nil && !r.lenient {
				return err
			} else if err == nil && index >= 0 {
				// attribute source code info
				optNode := r.getOptionNode(uo[index])
				if on, ok := optNode.(*optionNode); ok {
					r.interpretedOptions[on] = []int32{-1, internal.Field_defaultTag}
				}
				uo = removeOption(uo, index)
			}

			if len(uo) == 0 {
				// no real options, only pseudo-options above? clear out options
				fld.AsFieldDescriptorProto().Options = nil
			} else if remain, err := interpretOptions(r, fld, opts, uo); err != nil {
				return err
			} else {
				opts.UninterpretedOption = remain
			}
		}
	}
	return nil
}
+
+// processDefaultOption finds and interprets the "default" pseudo-option among
+// the given uninterpreted options for the given field. On success it stores
+// the rendered default value on the field's descriptor proto and returns the
+// index of the consumed option (or -1 if no "default" option was present).
+func processDefaultOption(res *parseResult, scope string, fld fldDescriptorish, uos []*dpb.UninterpretedOption) (defaultIndex int, err error) {
+	found, err := findOption(res, scope, uos, "default")
+	if err != nil {
+		return -1, err
+	} else if found == -1 {
+		return -1, nil
+	}
+	opt := uos[found]
+	optNode := res.getOptionNode(opt)
+	fdp := fld.AsFieldDescriptorProto()
+	// defaults are only valid for optional/required scalar fields
+	if fdp.GetLabel() == dpb.FieldDescriptorProto_LABEL_REPEATED {
+		return -1, ErrorWithSourcePos{Pos: optNode.getName().start(), Underlying: fmt.Errorf("%s: default value cannot be set because field is repeated", scope)}
+	}
+	if fdp.GetType() == dpb.FieldDescriptorProto_TYPE_GROUP || fdp.GetType() == dpb.FieldDescriptorProto_TYPE_MESSAGE {
+		return -1, ErrorWithSourcePos{Pos: optNode.getName().start(), Underlying: fmt.Errorf("%s: default value cannot be set because field is a message", scope)}
+	}
+	val := optNode.getValue()
+	if _, ok := val.(*aggregateLiteralNode); ok {
+		return -1, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%s: default value cannot be an aggregate", scope)}
+	}
+	mc := &messageContext{
+		res:         res,
+		file:        fld.GetFile(),
+		elementName: fld.GetFullyQualifiedName(),
+		elementType: descriptorType(fld.AsProto()),
+		option:      opt,
+	}
+	v, err := fieldValue(res, mc, fld, val, true)
+	if err != nil {
+		return -1, err
+	}
+	// render the value as the string form stored in DefaultValue
+	switch d := v.(type) {
+	case string:
+		fdp.DefaultValue = proto.String(d)
+	case []byte:
+		fdp.DefaultValue = proto.String(encodeDefaultBytes(d))
+	case float64:
+		fdp.DefaultValue = proto.String(formatFloatDefault(d, v))
+	case float32:
+		fdp.DefaultValue = proto.String(formatFloatDefault(float64(d), v))
+	default:
+		fdp.DefaultValue = proto.String(fmt.Sprintf("%v", v))
+	}
+	return found, nil
+}
+
+// formatFloatDefault renders a float default value, using the special
+// spellings "inf", "-inf", and "nan" that protoc uses; all other values fall
+// back to the %v formatting of the original (possibly float32) value.
+func formatFloatDefault(flt float64, orig interface{}) string {
+	switch {
+	case math.IsInf(flt, 1):
+		return "inf"
+	case math.IsInf(flt, -1):
+		return "-inf"
+	case math.IsNaN(flt):
+		return "nan"
+	default:
+		return fmt.Sprintf("%v", orig)
+	}
+}
+
+// encodeDefaultBytes renders b as a default-value string, escaping bytes the
+// same way the proto text format does.
+func encodeDefaultBytes(b []byte) string {
+	var out bytes.Buffer
+	writeEscapedBytes(&out, b)
+	return out.String()
+}
+
+// interpretEnumOptions interprets the options on the given enum and on each
+// of its values, leaving any option that could not be interpreted in the
+// corresponding UninterpretedOption list.
+func interpretEnumOptions(r *parseResult, ed enumDescriptorish) error {
+	if opts := ed.GetEnumOptions(); opts != nil && len(opts.UninterpretedOption) > 0 {
+		remain, err := interpretOptions(r, ed, opts, opts.UninterpretedOption)
+		if err != nil {
+			return err
+		}
+		opts.UninterpretedOption = remain
+	}
+	for _, evd := range ed.GetValues() {
+		if opts := evd.GetEnumValueOptions(); opts != nil && len(opts.UninterpretedOption) > 0 {
+			remain, err := interpretOptions(r, evd, opts, opts.UninterpretedOption)
+			if err != nil {
+				return err
+			}
+			opts.UninterpretedOption = remain
+		}
+	}
+	return nil
+}
+
+// interpretOptions processes the given uninterpreted options for the given
+// element, storing successfully interpreted values into opts (the element's
+// options message) and returning the options that remain uninterpreted. In
+// lenient mode (res.lenient), interpretation failures are not returned as
+// errors; the offending options are simply left in the returned remainder.
+func interpretOptions(res *parseResult, element descriptorish, opts proto.Message, uninterpreted []*dpb.UninterpretedOption) ([]*dpb.UninterpretedOption, error) {
+	optsd, err := desc.LoadMessageDescriptorForMessage(opts)
+	if err != nil {
+		if res.lenient {
+			return uninterpreted, nil
+		}
+		return nil, err
+	}
+	// interpret into a dynamic message so that extension fields (custom
+	// options) can be set reflectively
+	dm := dynamic.NewMessage(optsd)
+	err = dm.ConvertFrom(opts)
+	if err != nil {
+		if res.lenient {
+			return uninterpreted, nil
+		}
+		node := res.nodes[element.AsProto()]
+		return nil, ErrorWithSourcePos{Pos: node.start(), Underlying: err}
+	}
+
+	mc := &messageContext{res: res, file: element.GetFile(), elementName: element.GetFullyQualifiedName(), elementType: descriptorType(element.AsProto())}
+	var remain []*dpb.UninterpretedOption
+	for _, uo := range uninterpreted {
+		node := res.getOptionNode(uo)
+		if !uo.Name[0].GetIsExtension() && uo.Name[0].GetNamePart() == "uninterpreted_option" {
+			if res.lenient {
+				remain = append(remain, uo)
+				continue
+			}
+			// uninterpreted_option might be found reflectively, but is not actually valid for use
+			return nil, ErrorWithSourcePos{Pos: node.getName().start(), Underlying: fmt.Errorf("%vinvalid option 'uninterpreted_option'", mc)}
+		}
+		mc.option = uo
+		path, err := interpretField(res, mc, element, dm, uo, 0, nil)
+		if err != nil {
+			if res.lenient {
+				remain = append(remain, uo)
+				continue
+			}
+			return nil, err
+		}
+		// remember the descriptor path for the interpreted option so that
+		// source code info can later be attributed to its final location
+		if optn, ok := node.(*optionNode); ok {
+			res.interpretedOptions[optn] = path
+		}
+	}
+
+	if err := dm.ValidateRecursive(); err != nil {
+		// if lenient, we'll let this pass, but it means that some required field was not set!
+		// TODO: do this in a more granular way, so we can validate individual fields
+		// and leave them uninterpreted, instead of just having to live with the
+		// thing having invalid data in extensions.
+		if !res.lenient {
+			node := res.nodes[element.AsProto()]
+			return nil, ErrorWithSourcePos{Pos: node.start(), Underlying: fmt.Errorf("error in %s options: %v", descriptorType(element.AsProto()), err)}
+		}
+	}
+
+	if res.lenient {
+		// If we're lenient, then we don't want to clobber the passed in message
+		// and leave it partially populated. So we convert into a copy first
+		optsClone := proto.Clone(opts)
+		if err := dm.ConvertTo(optsClone); err != nil {
+			// TODO: do this in a more granular way, so we can convert individual
+			// fields and leave bad ones uninterpreted instead of skipping all of
+			// the work we've done so far.
+			return uninterpreted, nil
+		}
+		// conversion from dynamic message above worked, so now
+		// it is safe to overwrite the passed in message
+		opts.Reset()
+		proto.Merge(opts, optsClone)
+
+	} else {
+		// not lenient: try to convert into the passed in message
+		// and fail is not successful
+		if err := dm.ConvertTo(opts); err != nil {
+			node := res.nodes[element.AsProto()]
+			return nil, ErrorWithSourcePos{Pos: node.start(), Underlying: err}
+		}
+	}
+
+	return remain, nil
+}
+
+// interpretField resolves the option name part at nameIndex against the given
+// dynamic options message, recursing for dotted names that navigate into
+// nested message-typed fields. When the last name part is reached, the
+// option's value is set on dm. It returns the field-number path (appended to
+// pathPrefix) identifying where the value was stored, for use in attributing
+// source code info.
+func interpretField(res *parseResult, mc *messageContext, element descriptorish, dm *dynamic.Message, opt *dpb.UninterpretedOption, nameIndex int, pathPrefix []int32) (path []int32, err error) {
+	var fld *desc.FieldDescriptor
+	nm := opt.GetName()[nameIndex]
+	node := res.getOptionNamePartNode(nm)
+	if nm.GetIsExtension() {
+		extName := nm.GetNamePart()
+		if extName[0] == '.' {
+			extName = extName[1:] /* skip leading dot */
+		}
+		fld = findExtension(element.GetFile(), extName, false, map[fileDescriptorish]struct{}{})
+		if fld == nil {
+			return nil, ErrorWithSourcePos{
+				Pos: node.start(),
+				Underlying: fmt.Errorf("%vunrecognized extension %s of %s",
+					mc, extName, dm.GetMessageDescriptor().GetFullyQualifiedName()),
+			}
+		}
+		// the extension must extend the actual options message we are setting
+		if fld.GetOwner().GetFullyQualifiedName() != dm.GetMessageDescriptor().GetFullyQualifiedName() {
+			return nil, ErrorWithSourcePos{
+				Pos: node.start(),
+				Underlying: fmt.Errorf("%vextension %s should extend %s but instead extends %s",
+					mc, extName, dm.GetMessageDescriptor().GetFullyQualifiedName(), fld.GetOwner().GetFullyQualifiedName()),
+			}
+		}
+	} else {
+		fld = dm.GetMessageDescriptor().FindFieldByName(nm.GetNamePart())
+		if fld == nil {
+			return nil, ErrorWithSourcePos{
+				Pos: node.start(),
+				Underlying: fmt.Errorf("%vfield %s of %s does not exist",
+					mc, nm.GetNamePart(), dm.GetMessageDescriptor().GetFullyQualifiedName()),
+			}
+		}
+	}
+
+	path = append(pathPrefix, fld.GetNumber())
+
+	if len(opt.GetName()) > nameIndex+1 {
+		// more name parts follow, so this field must be a singular message
+		nextnm := opt.GetName()[nameIndex+1]
+		nextnode := res.getOptionNamePartNode(nextnm)
+		if fld.GetType() != dpb.FieldDescriptorProto_TYPE_MESSAGE {
+			return nil, ErrorWithSourcePos{
+				Pos: nextnode.start(),
+				Underlying: fmt.Errorf("%vcannot set field %s because %s is not a message",
+					mc, nextnm.GetNamePart(), nm.GetNamePart()),
+			}
+		}
+		if fld.IsRepeated() {
+			return nil, ErrorWithSourcePos{
+				Pos: nextnode.start(),
+				Underlying: fmt.Errorf("%vcannot set field %s because %s is repeated (must use an aggregate)",
+					mc, nextnm.GetNamePart(), nm.GetNamePart()),
+			}
+		}
+		// fetch (or create and attach) the nested message to recurse into
+		var fdm *dynamic.Message
+		var err error
+		if dm.HasField(fld) {
+			var v interface{}
+			v, err = dm.TryGetField(fld)
+			fdm, _ = v.(*dynamic.Message)
+		} else {
+			fdm = dynamic.NewMessage(fld.GetMessageType())
+			err = dm.TrySetField(fld, fdm)
+		}
+		if err != nil {
+			return nil, ErrorWithSourcePos{Pos: node.start(), Underlying: err}
+		}
+		// recurse to set next part of name
+		return interpretField(res, mc, element, fdm, opt, nameIndex+1, path)
+	}
+
+	optNode := res.getOptionNode(opt)
+	if err := setOptionField(res, mc, dm, fld, node, optNode.getValue()); err != nil {
+		return nil, err
+	}
+	if fld.IsRepeated() {
+		// for repeated fields, the path also includes the element's index
+		path = append(path, int32(dm.FieldLength(fld))-1)
+	}
+	return path, nil
+}
+
+// findExtension searches the given file and its imports for an extension
+// field with the given fully-qualified name, returning nil if none is found.
+// The public flag restricts traversal to public imports (which is required
+// for transitive imports, per the semantics of public imports). The checked
+// set guards against visiting the same file more than once.
+func findExtension(fd fileDescriptorish, name string, public bool, checked map[fileDescriptorish]struct{}) *desc.FieldDescriptor {
+	if _, done := checked[fd]; done {
+		return nil
+	}
+	checked[fd] = struct{}{}
+	if d := fd.FindSymbol(name); d != nil {
+		// nil result if the symbol exists but is not an extension field
+		fld, _ := d.(*desc.FieldDescriptor)
+		return fld
+	}
+
+	// When public = false, we are searching only directly imported symbols,
+	// but transitive traversal must still follow public imports only.
+	deps := fd.GetDependencies()
+	if public {
+		deps = fd.GetPublicDependencies()
+	}
+	for _, dep := range deps {
+		if fld := findExtension(dep, name, true, checked); fld != nil {
+			return fld
+		}
+	}
+	return nil
+}
+
+// setOptionField converts the AST value val to the runtime type required by
+// fld and stores it on dm. Array literals are only allowed for repeated
+// fields, with each element converted and appended individually; setting a
+// non-repeated field that already has a value is an error. The name node is
+// used to position errors about the field itself (vs. its value).
+func setOptionField(res *parseResult, mc *messageContext, dm *dynamic.Message, fld *desc.FieldDescriptor, name node, val valueNode) error {
+	v := val.value()
+	if sl, ok := v.([]valueNode); ok {
+		// handle slices a little differently than the others
+		if !fld.IsRepeated() {
+			return ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue is an array but field is not repeated", mc)}
+		}
+		// record the index of each element in the context's aggregate path
+		// so error messages can pinpoint the offending element
+		origPath := mc.optAggPath
+		defer func() {
+			mc.optAggPath = origPath
+		}()
+		for index, item := range sl {
+			mc.optAggPath = fmt.Sprintf("%s[%d]", origPath, index)
+			if v, err := fieldValue(res, mc, richFldDescriptorish{FieldDescriptor: fld}, item, false); err != nil {
+				return err
+			} else if err = dm.TryAddRepeatedField(fld, v); err != nil {
+				return ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%verror setting value: %s", mc, err)}
+			}
+		}
+		return nil
+	}
+
+	v, err := fieldValue(res, mc, richFldDescriptorish{FieldDescriptor: fld}, val, false)
+	if err != nil {
+		return err
+	}
+	if fld.IsRepeated() {
+		err = dm.TryAddRepeatedField(fld, v)
+	} else {
+		if dm.HasField(fld) {
+			return ErrorWithSourcePos{Pos: name.start(), Underlying: fmt.Errorf("%vnon-repeated option field %s already set", mc, fieldName(fld))}
+		}
+		err = dm.TrySetField(fld, v)
+	}
+	if err != nil {
+		return ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%verror setting value: %s", mc, err)}
+	}
+
+	return nil
+}
+
+// messageContext tracks where an option value occurs so that error messages
+// can be prefixed with a description of the element and option involved.
+// Its String method renders that prefix.
+type messageContext struct {
+	res         *parseResult // parse state, used to check for source position info
+	file        fileDescriptorish
+	elementType string // e.g. "field", "message", "file"
+	elementName string
+	option      *dpb.UninterpretedOption // the option currently being interpreted, if any
+	optAggPath  string                   // path within an aggregate option value, e.g. "foo.bar[2]"
+}
+
+// String renders the context as an error-message prefix, for example
+// "field foo.bar: option (baz): ".
+func (c *messageContext) String() string {
+	var out bytes.Buffer
+	if c.elementType != "file" {
+		fmt.Fprintf(&out, "%s %s: ", c.elementType, c.elementName)
+	}
+	if c.option != nil && c.option.Name != nil {
+		out.WriteString("option ")
+		writeOptionName(&out, c.option.Name)
+		// Without AST nodes we have no file/line positions, so include the
+		// path within the aggregate value to give as much context as possible.
+		if c.res.nodes == nil && c.optAggPath != "" {
+			fmt.Fprintf(&out, " at %s", c.optAggPath)
+		}
+		out.WriteString(": ")
+	}
+	return out.String()
+}
+
+// writeOptionName writes the dotted option name given by parts to buf,
+// wrapping extension name parts in parentheses and dropping any leading dot.
+func writeOptionName(buf *bytes.Buffer, parts []*dpb.UninterpretedOption_NamePart) {
+	for i, p := range parts {
+		if i > 0 {
+			buf.WriteString(".")
+		}
+		nm := p.GetNamePart()
+		if nm[0] == '.' {
+			// fully-qualified names start with a dot; strip it for display
+			nm = nm[1:]
+		}
+		if p.GetIsExtension() {
+			buf.WriteString("(")
+			buf.WriteString(nm)
+			buf.WriteString(")")
+		} else {
+			buf.WriteString(nm)
+		}
+	}
+}
+
+// fieldName returns the name used to refer to fld in error messages:
+// extensions are identified by fully-qualified name, normal fields by their
+// simple name.
+func fieldName(fld *desc.FieldDescriptor) string {
+	if !fld.IsExtension() {
+		return fld.GetName()
+	}
+	return fld.GetFullyQualifiedName()
+}
+
+// valueKind describes the kind of a parsed option value, for use in
+// "expecting X, got Y" error messages.
+func valueKind(val interface{}) string {
+	switch v := val.(type) {
+	case identifier:
+		return "identifier"
+	case bool:
+		return "bool"
+	case int64:
+		if v < 0 {
+			return "negative integer"
+		}
+		return "integer"
+	case uint64:
+		return "integer"
+	case float64:
+		return "double"
+	case string:
+		return "string"
+	case []byte:
+		return "string"
+	case []*aggregateEntryNode:
+		return "message"
+	}
+	return fmt.Sprintf("%T", val)
+}
+
+// fieldValue converts the AST value node val into a runtime value suitable
+// for field fld, returning an error (with source position) if the value's
+// kind or range does not match the field's type. For enum fields,
+// enumAsString selects between returning the value's name (used when
+// interpreting the "default" pseudo-option) and its number. Aggregate values
+// for message/group fields are converted to dynamic messages.
+func fieldValue(res *parseResult, mc *messageContext, fld fldDescriptorish, val valueNode, enumAsString bool) (interface{}, error) {
+	v := val.value()
+	t := fld.AsFieldDescriptorProto().GetType()
+	switch t {
+	case dpb.FieldDescriptorProto_TYPE_ENUM:
+		if id, ok := v.(identifier); ok {
+			ev := fld.GetEnumType().FindValueByName(string(id))
+			if ev == nil {
+				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%venum %s has no value named %s", mc, fld.GetEnumType().GetFullyQualifiedName(), id)}
+			}
+			if enumAsString {
+				return ev.GetName(), nil
+			} else {
+				return ev.GetNumber(), nil
+			}
+		}
+		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting enum, got %s", mc, valueKind(v))}
+	case dpb.FieldDescriptorProto_TYPE_MESSAGE, dpb.FieldDescriptorProto_TYPE_GROUP:
+		if aggs, ok := v.([]*aggregateEntryNode); ok {
+			fmd := fld.GetMessageType()
+			fdm := dynamic.NewMessage(fmd)
+			// track the path within the aggregate so nested errors can
+			// report where they occurred; restored on return
+			origPath := mc.optAggPath
+			defer func() {
+				mc.optAggPath = origPath
+			}()
+			for _, a := range aggs {
+				if origPath == "" {
+					mc.optAggPath = a.name.value()
+				} else {
+					mc.optAggPath = origPath + "." + a.name.value()
+				}
+				var ffld *desc.FieldDescriptor
+				if a.name.isExtension {
+					n := a.name.name.val
+					ffld = findExtension(mc.file, n, false, map[fileDescriptorish]struct{}{})
+					if ffld == nil {
+						// may need to qualify with package name
+						pkg := mc.file.GetPackage()
+						if pkg != "" {
+							ffld = findExtension(mc.file, pkg+"."+n, false, map[fileDescriptorish]struct{}{})
+						}
+					}
+				} else {
+					ffld = fmd.FindFieldByName(a.name.value())
+				}
+				if ffld == nil {
+					return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vfield %s not found", mc, a.name.name.val)}
+				}
+				if err := setOptionField(res, mc, fdm, ffld, a.name, a.val); err != nil {
+					return nil, err
+				}
+			}
+			return fdm, nil
+		}
+		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting message, got %s", mc, valueKind(v))}
+	case dpb.FieldDescriptorProto_TYPE_BOOL:
+		if b, ok := v.(bool); ok {
+			return b, nil
+		}
+		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting bool, got %s", mc, valueKind(v))}
+	case dpb.FieldDescriptorProto_TYPE_BYTES:
+		if str, ok := v.(string); ok {
+			return []byte(str), nil
+		}
+		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting bytes, got %s", mc, valueKind(v))}
+	case dpb.FieldDescriptorProto_TYPE_STRING:
+		if str, ok := v.(string); ok {
+			return str, nil
+		}
+		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting string, got %s", mc, valueKind(v))}
+	case dpb.FieldDescriptorProto_TYPE_INT32, dpb.FieldDescriptorProto_TYPE_SINT32, dpb.FieldDescriptorProto_TYPE_SFIXED32:
+		// literals may lex as int64 or uint64; range-check either way
+		if i, ok := v.(int64); ok {
+			if i > math.MaxInt32 || i < math.MinInt32 {
+				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %d is out of range for int32", mc, i)}
+			}
+			return int32(i), nil
+		}
+		if ui, ok := v.(uint64); ok {
+			if ui > math.MaxInt32 {
+				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %d is out of range for int32", mc, ui)}
+			}
+			return int32(ui), nil
+		}
+		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting int32, got %s", mc, valueKind(v))}
+	case dpb.FieldDescriptorProto_TYPE_UINT32, dpb.FieldDescriptorProto_TYPE_FIXED32:
+		if i, ok := v.(int64); ok {
+			if i > math.MaxUint32 || i < 0 {
+				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %d is out of range for uint32", mc, i)}
+			}
+			return uint32(i), nil
+		}
+		if ui, ok := v.(uint64); ok {
+			if ui > math.MaxUint32 {
+				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %d is out of range for uint32", mc, ui)}
+			}
+			return uint32(ui), nil
+		}
+		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting uint32, got %s", mc, valueKind(v))}
+	case dpb.FieldDescriptorProto_TYPE_INT64, dpb.FieldDescriptorProto_TYPE_SINT64, dpb.FieldDescriptorProto_TYPE_SFIXED64:
+		if i, ok := v.(int64); ok {
+			return i, nil
+		}
+		if ui, ok := v.(uint64); ok {
+			if ui > math.MaxInt64 {
+				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %d is out of range for int64", mc, ui)}
+			}
+			return int64(ui), nil
+		}
+		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting int64, got %s", mc, valueKind(v))}
+	case dpb.FieldDescriptorProto_TYPE_UINT64, dpb.FieldDescriptorProto_TYPE_FIXED64:
+		if i, ok := v.(int64); ok {
+			if i < 0 {
+				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %d is out of range for uint64", mc, i)}
+			}
+			return uint64(i), nil
+		}
+		if ui, ok := v.(uint64); ok {
+			return ui, nil
+		}
+		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting uint64, got %s", mc, valueKind(v))}
+	case dpb.FieldDescriptorProto_TYPE_DOUBLE:
+		if d, ok := v.(float64); ok {
+			return d, nil
+		}
+		if i, ok := v.(int64); ok {
+			return float64(i), nil
+		}
+		if u, ok := v.(uint64); ok {
+			return float64(u), nil
+		}
+		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting double, got %s", mc, valueKind(v))}
+	case dpb.FieldDescriptorProto_TYPE_FLOAT:
+		if d, ok := v.(float64); ok {
+			// infinities and NaN are allowed; only finite out-of-range
+			// values are rejected
+			if (d > math.MaxFloat32 || d < -math.MaxFloat32) && !math.IsInf(d, 1) && !math.IsInf(d, -1) && !math.IsNaN(d) {
+				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %f is out of range for float", mc, d)}
+			}
+			return float32(d), nil
+		}
+		if i, ok := v.(int64); ok {
+			return float32(i), nil
+		}
+		if u, ok := v.(uint64); ok {
+			return float32(u), nil
+		}
+		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting float, got %s", mc, valueKind(v))}
+	default:
+		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vunrecognized field type: %s", mc, t)}
+	}
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/parser.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/parser.go
new file mode 100644
index 0000000..ce9a3e4
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/parser.go
@@ -0,0 +1,1520 @@
+package protoparse
+
+import (
+	"bytes"
+	"errors"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"math"
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	"github.com/golang/protobuf/proto"
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+
+	"github.com/jhump/protoreflect/desc"
+	"github.com/jhump/protoreflect/desc/internal"
+)
+
+//go:generate goyacc -o proto.y.go -p proto proto.y
+
+// errNoImportPathsForAbsoluteFilePath is returned by ResolveFilenames when no
+// import paths are supplied but at least one file name is absolute.
+var errNoImportPathsForAbsoluteFilePath = errors.New("must specify at least one import path if any absolute file paths are given")
+
+// init tweaks the goyacc-generated parser tables so that error messages use
+// human-friendly token names instead of the generated identifiers.
+func init() {
+	protoErrorVerbose = true
+
+	// fix up the generated "token name" array so that error messages are nicer
+	setTokenName(_STRING_LIT, "string literal")
+	setTokenName(_INT_LIT, "int literal")
+	setTokenName(_FLOAT_LIT, "float literal")
+	setTokenName(_NAME, "identifier")
+	setTokenName(_FQNAME, "fully-qualified name")
+	setTokenName(_TYPENAME, "type name")
+	setTokenName(_ERROR, "error")
+	// for keywords, just show the keyword itself wrapped in quotes
+	for str, i := range keywords {
+		setTokenName(i, fmt.Sprintf(`"%s"`, str))
+	}
+}
+
+// setTokenName replaces the generated parser's display name for the given
+// lexer token with the given text. It panics if the token value cannot be
+// found in any of the generated translation tables (which would indicate the
+// grammar and this code are out of sync).
+func setTokenName(token int, text string) {
+	// NB: this is based on logic in generated parse code that translates the
+	// int returned from the lexer into an internal token number.
+	var intern int
+	if token < len(protoTok1) {
+		intern = protoTok1[token]
+	} else {
+		if token >= protoPrivate {
+			if token < protoPrivate+len(protoTok2) {
+				intern = protoTok2[token-protoPrivate]
+			}
+		}
+		// not found in the dense tables; scan the sparse (token, intern) pairs
+		if intern == 0 {
+			for i := 0; i+1 < len(protoTok3); i += 2 {
+				if protoTok3[i] == token {
+					intern = protoTok3[i+1]
+					break
+				}
+			}
+		}
+	}
+
+	if intern >= 1 && intern-1 < len(protoToknames) {
+		protoToknames[intern-1] = text
+		return
+	}
+
+	panic(fmt.Sprintf("Unknown token value: %d", token))
+}
+
+// FileAccessor is an abstraction for opening proto source files. It takes the
+// name of the file to open and returns either the input reader or an error.
+// (The default accessor used by the parser is os.Open; map-backed accessors
+// created via FileContentsFromMap return os.ErrNotExist for unknown names.)
+type FileAccessor func(filename string) (io.ReadCloser, error)
+
+// FileContentsFromMap returns a FileAccessor backed by the given map, whose
+// keys are paths to proto source files and whose values are the corresponding
+// source contents. This allows proto sources constructed in memory to be
+// supplied directly to a parser. Unknown names yield os.ErrNotExist.
+func FileContentsFromMap(files map[string]string) FileAccessor {
+	return func(filename string) (io.ReadCloser, error) {
+		if contents, ok := files[filename]; ok {
+			return ioutil.NopCloser(strings.NewReader(contents)), nil
+		}
+		return nil, os.ErrNotExist
+	}
+}
+
+// ResolveFilenames tries to resolve fileNames into paths that are relative to
+// directories in the given importPaths. The returned slice has the results in
+// the same order as they are supplied in fileNames.
+//
+// The resulting names should be suitable for passing to Parser.ParseFiles.
+//
+// If importPaths is empty and any path is absolute, this returns error.
+// If importPaths is empty and all paths are relative, this returns the original fileNames.
+func ResolveFilenames(importPaths []string, fileNames ...string) ([]string, error) {
+	if len(importPaths) == 0 {
+		if containsAbsFilePath(fileNames) {
+			// Rejecting absolute paths here avoids duplicate symbols from
+			// parseProtoFiles: an import like "foo/bar/bar.proto" found inside
+			// an input file is parsed under that relative name, while the same
+			// file supplied as "/home/alice/dev/foo/bar/bar.proto" on the
+			// command line would be parsed again under the absolute name,
+			// producing 'duplicate symbol ... already defined' errors from
+			// ParseFiles. Requiring an import path lets both spellings be
+			// normalized to a single name.
+			return nil, errNoImportPathsForAbsoluteFilePath
+		}
+		return fileNames, nil
+	}
+	absImportPaths, err := absoluteFilePaths(importPaths)
+	if err != nil {
+		return nil, err
+	}
+	absFileNames, err := absoluteFilePaths(fileNames)
+	if err != nil {
+		return nil, err
+	}
+	resolved := make([]string, len(absFileNames))
+	for i, absFileName := range absFileNames {
+		rel, err := resolveAbsFilename(absImportPaths, absFileName)
+		if err != nil {
+			return nil, err
+		}
+		resolved[i] = rel
+	}
+	return resolved, nil
+}
+
+// Parser parses proto source into descriptors.
+type Parser struct {
+	// The paths used to search for dependencies that are referenced in import
+	// statements in proto source files. If no import paths are provided then
+	// "." (current directory) is assumed to be the only import path.
+	//
+	// This setting is only used during ParseFiles operations. Since calls to
+	// ParseFilesButDoNotLink do not link, there is no need to load and parse
+	// dependencies.
+	ImportPaths []string
+
+	// If true, the supplied file names/paths need not necessarily match how the
+	// files are referenced in import statements. The parser will attempt to
+	// match import statements to supplied paths, "guessing" the import paths
+	// for the files. Note that this inference is not perfect and link errors
+	// could result. It works best when all proto files are organized such that
+	// a single import path can be inferred (e.g. all files under a single tree
+	// with import statements all being relative to the root of this tree).
+	InferImportPaths bool
+
+	// Used to create a reader for a given filename, when loading proto source
+	// file contents. If unset, os.Open is used. If ImportPaths is also empty
+	// then relative paths will be resolved relative to the process's current
+	// working directory.
+	Accessor FileAccessor
+
+	// If true, the resulting file descriptors will retain source code info,
+	// that maps elements to their location in the source files as well as
+	// includes comments found during parsing (and attributed to elements of
+	// the source file).
+	IncludeSourceCodeInfo bool
+
+	// If true, the results from ParseFilesButDoNotLink will be passed through
+	// some additional validations. But only constraints that do not require
+	// linking can be checked. These include proto2 vs. proto3 language features,
+	// looking for incorrect usage of reserved names or tags, and ensuring that
+	// fields have unique tags and that enum values have unique numbers (unless
+	// the enum allows aliases).
+	ValidateUnlinkedFiles bool
+
+	// If true, the results from ParseFilesButDoNotLink will have options
+	// interpreted. Any uninterpretable options (including any custom options or
+	// options that refer to message and enum types, which can only be
+	// interpreted after linking) will be left in uninterpreted_options. Also,
+	// the "default" pseudo-option for fields can only be interpreted for scalar
+	// fields, excluding enums. (Interpreting default values for enum fields
+	// requires resolving enum names, which requires linking.)
+	InterpretOptionsInUnlinkedFiles bool
+}
+
+// ParseFiles parses the named files into descriptors. The returned slice has
+// the same number of entries as the given filenames, in the same order. So the
+// first returned descriptor corresponds to the first given name, and so on.
+//
+// All dependencies for all specified files (including transitive dependencies)
+// must be accessible via the parser's Accessor or a link error will occur. The
+// exception to this rule is that files can import standard Google-provided
+// files -- e.g. google/protobuf/*.proto -- without needing to supply sources
+// for these files. Like protoc, this parser has a built-in version of these
+// files it can use if they aren't explicitly supplied.
+func (p Parser) ParseFiles(filenames ...string) ([]*desc.FileDescriptor, error) {
+	accessor := p.Accessor
+	if accessor == nil {
+		// default accessor reads directly from the file system
+		accessor = func(name string) (io.ReadCloser, error) {
+			return os.Open(name)
+		}
+	}
+	paths := p.ImportPaths
+	if len(paths) > 0 {
+		// wrap the accessor to try each import path in order; if the file is
+		// found under none of them, the error from the first path is returned
+		acc := accessor
+		accessor = func(name string) (io.ReadCloser, error) {
+			var ret error
+			for _, path := range paths {
+				f, err := acc(filepath.Join(path, name))
+				if err != nil {
+					if ret == nil {
+						ret = err
+					}
+					continue
+				}
+				return f, nil
+			}
+			return nil, ret
+		}
+	}
+
+	protos := map[string]*parseResult{}
+	err := parseProtoFiles(accessor, filenames, true, true, protos)
+	if err != nil {
+		return nil, err
+	}
+	if p.InferImportPaths {
+		protos = fixupFilenames(protos)
+	}
+	linkedProtos, err := newLinker(protos).linkFiles()
+	if err != nil {
+		return nil, err
+	}
+	if p.IncludeSourceCodeInfo {
+		// attach source code info to the linked descriptors
+		for name, fd := range linkedProtos {
+			pr := protos[name]
+			fd.AsFileDescriptorProto().SourceCodeInfo = pr.generateSourceCodeInfo()
+			internal.RecomputeSourceInfo(fd)
+		}
+	}
+	// return results in the same order as the requested filenames
+	fds := make([]*desc.FileDescriptor, len(filenames))
+	for i, name := range filenames {
+		fd := linkedProtos[name]
+		fds[i] = fd
+	}
+	return fds, nil
+}
+
+// ParseFilesButDoNotLink parses the named files into descriptor protos. The
+// results are just protos, not fully-linked descriptors. It is possible that
+// descriptors are invalid and still be returned in parsed form without error
+// due to the fact that the linking step is skipped (and thus many validation
+// steps omitted).
+//
+// There are a few side effects to not linking the descriptors:
+//   1. No options will be interpreted. Options can refer to extensions or have
+//      message and enum types. Without linking, these extension and type
+//      references are not resolved, so the options may not be interpretable.
+//      So all options will appear in UninterpretedOption fields of the various
+//      descriptor options messages.
+//   2. Type references will not be resolved. This means that the actual type
+//      names in the descriptors may be unqualified and even relative to the
+//      scope in which the type reference appears. This goes for fields that
+//      have message and enum types. It also applies to methods and their
+//      references to request and response message types.
+//   3. Enum fields are not known. Until a field's type reference is resolved
+//      (during linking), it is not known whether the type refers to a message
+//      or an enum. So all fields with such type references have their Type set
+//      to TYPE_MESSAGE.
+//
+// This method will still validate the syntax of parsed files. If the parser's
+// ValidateUnlinkedFiles field is true, additional checks, beyond syntax will
+// also be performed.
+func (p Parser) ParseFilesButDoNotLink(filenames ...string) ([]*dpb.FileDescriptorProto, error) {
+	accessor := p.Accessor
+	if accessor == nil {
+		// default accessor reads directly from the file system
+		accessor = func(name string) (io.ReadCloser, error) {
+			return os.Open(name)
+		}
+	}
+
+	protos := map[string]*parseResult{}
+	err := parseProtoFiles(accessor, filenames, false, p.ValidateUnlinkedFiles, protos)
+	if err != nil {
+		return nil, err
+	}
+	if p.InferImportPaths {
+		protos = fixupFilenames(protos)
+	}
+	fds := make([]*dpb.FileDescriptorProto, len(filenames))
+	for i, name := range filenames {
+		pr := protos[name]
+		fd := pr.fd
+		if p.InterpretOptionsInUnlinkedFiles {
+			// lenient mode: options that cannot be interpreted are left in
+			// uninterpreted_options rather than reported, so the returned
+			// error is intentionally ignored here
+			pr.lenient = true
+			interpretFileOptions(pr, poorFileDescriptorish{FileDescriptorProto: fd})
+		}
+		if p.IncludeSourceCodeInfo {
+			fd.SourceCodeInfo = pr.generateSourceCodeInfo()
+		}
+		fds[i] = fd
+	}
+	return fds, nil
+}
+
+// containsAbsFilePath reports whether any of the given paths is absolute.
+func containsAbsFilePath(filePaths []string) bool {
+	for _, p := range filePaths {
+		if filepath.IsAbs(p) {
+			return true
+		}
+	}
+	return false
+}
+
+// absoluteFilePaths maps each given path to its absolute form, failing on the
+// first path that cannot be made absolute.
+func absoluteFilePaths(filePaths []string) ([]string, error) {
+	abs := make([]string, len(filePaths))
+	for i, p := range filePaths {
+		a, err := filepath.Abs(p)
+		if err != nil {
+			return nil, err
+		}
+		abs[i] = a
+	}
+	return abs, nil
+}
+
+// resolveAbsFilename returns absFileName expressed relative to the first
+// import path that contains it, or an error if no import path does.
+func resolveAbsFilename(absImportPaths []string, absFileName string) (string, error) {
+	for _, importPath := range absImportPaths {
+		if !isDescendant(importPath, absFileName) {
+			continue
+		}
+		return filepath.Rel(importPath, absFileName)
+	}
+	return "", fmt.Errorf("%s does not reside in any import path", absFileName)
+}
+
+// isDescendant reports whether file lies underneath dir, determined lexically
+// by walking up file's ancestor directories looking for dir.
+func isDescendant(dir, file string) bool {
+	dir = filepath.Clean(dir)
+	for cur := file; ; {
+		parent := filepath.Dir(cur)
+		switch parent {
+		case dir:
+			return true
+		case ".", cur:
+			// ran out of path elements without finding dir
+			return false
+		}
+		cur = parent
+	}
+}
+
// fixupFilenames attempts to rename the keys (and descriptor file names) of
// the given parse results so that they match the paths used in the files'
// import statements, which makes the results linkable. It returns a revised
// map; entries whose names could not be matched to any import are carried
// over unchanged.
func fixupFilenames(protos map[string]*parseResult) map[string]*parseResult {
	// In the event that the given filenames (keys in the supplied map) do not
	// match the actual paths used in 'import' statements in the files, we try
	// to revise names in the protos so that they will match and be linkable.
	revisedProtos := map[string]*parseResult{}

	protoPaths := map[string]struct{}{}
	// TODO: this is O(n^2) but could likely be O(n) with a clever data structure (prefix tree that is indexed backwards?)
	importCandidates := map[string]map[string]struct{}{}
	candidatesAvailable := map[string]struct{}{}
	// Phase 1: for every import path mentioned anywhere, collect the set of
	// supplied filenames that could correspond to it (i.e. end with it).
	for name := range protos {
		candidatesAvailable[name] = struct{}{}
		for _, f := range protos {
			for _, imp := range f.fd.Dependency {
				if strings.HasSuffix(name, imp) {
					candidates := importCandidates[imp]
					if candidates == nil {
						candidates = map[string]struct{}{}
						importCandidates[imp] = candidates
					}
					candidates[name] = struct{}{}
				}
			}
		}
	}
	// Phase 2: pick the best candidate per import, rename it to the import
	// path, and remember the stripped prefix as a discovered proto path.
	for imp, candidates := range importCandidates {
		// if we found multiple possible candidates, use the one that is an exact match
		// if it exists, and otherwise, guess that it's the shortest path (fewest elements)
		var best string
		for c := range candidates {
			if _, ok := candidatesAvailable[c]; !ok {
				// already used this candidate and re-written its filename accordingly
				continue
			}
			if c == imp {
				// exact match!
				best = c
				break
			}
			if best == "" {
				best = c
			} else {
				// HACK: we can't actually tell which files is supposed to match
				// this import, so arbitrarily pick the "shorter" one (fewest
				// path elements) or, on a tie, the lexically earlier one
				minLen := strings.Count(best, string(filepath.Separator))
				cLen := strings.Count(c, string(filepath.Separator))
				if cLen < minLen || (cLen == minLen && c < best) {
					best = c
				}
			}
		}
		if best != "" {
			prefix := best[:len(best)-len(imp)]
			if len(prefix) > 0 {
				protoPaths[prefix] = struct{}{}
			}
			f := protos[best]
			f.fd.Name = proto.String(imp)
			revisedProtos[imp] = f
			delete(candidatesAvailable, best)
		}
	}

	if len(candidatesAvailable) == 0 {
		return revisedProtos
	}

	if len(protoPaths) == 0 {
		// no prefixes were discovered, so remaining entries keep their names
		for c := range candidatesAvailable {
			revisedProtos[c] = protos[c]
		}
		return revisedProtos
	}

	// Any remaining candidates are entry-points (not imported by others), so
	// the best bet to "fixing" their file name is to see if they're in one of
	// the proto paths we found, and if so strip that prefix.
	protoPathStrs := make([]string, len(protoPaths))
	i := 0
	for p := range protoPaths {
		protoPathStrs[i] = p
		i++
	}
	sort.Strings(protoPathStrs)
	// we look at paths in reverse order, so we'll use a longer proto path if
	// there is more than one match
	for c := range candidatesAvailable {
		var imp string
		for i := len(protoPathStrs) - 1; i >= 0; i-- {
			p := protoPathStrs[i]
			if strings.HasPrefix(c, p) {
				imp = c[len(p):]
				break
			}
		}
		if imp != "" {
			f := protos[c]
			f.fd.Name = proto.String(imp)
			revisedProtos[imp] = f
		} else {
			revisedProtos[c] = protos[c]
		}
	}

	return revisedProtos
}
+
// parseProtoFiles parses each named file, using acc to open it, and stores
// results in parsed (keyed by filename). Names already present in parsed are
// skipped. A name that cannot be opened but matches a known standard import
// is satisfied from the compiled-in standardImports table (with no AST).
// When recursive is true, each file's dependencies are parsed as well.
func parseProtoFiles(acc FileAccessor, filenames []string, recursive, validate bool, parsed map[string]*parseResult) error {
	for _, name := range filenames {
		if _, ok := parsed[name]; ok {
			continue
		}
		in, err := acc(name)
		if err != nil {
			if d, ok := standardImports[name]; ok {
				parsed[name] = &parseResult{fd: d}
				continue
			}
			return err
		}
		// parse inside a closure so the reader is closed before any
		// recursion below opens more files
		func() {
			defer in.Close()
			parsed[name], err = parseProto(name, in, validate)
		}()
		if err != nil {
			return err
		}
		if recursive {
			err = parseProtoFiles(acc, parsed[name].fd.Dependency, true, validate, parsed)
			if err != nil {
				return fmt.Errorf("failed to load imports for %q: %s", name, err)
			}
		}
	}
	return nil
}
+
// parseResult is the outcome of parsing a single proto source file: the
// resulting file descriptor proto plus bookkeeping used for validation,
// option interpretation, and source code info generation.
type parseResult struct {
	// the parsed file descriptor
	fd *dpb.FileDescriptorProto

	// if set to true, enables lenient interpretation of options, where
	// unrecognized options will be left uninterpreted instead of resulting in a
	// link error
	lenient bool

	// a map of elements in the descriptor to nodes in the AST
	// (for extracting position information when validating the descriptor);
	// nil for results that have no source (e.g. standard imports)
	nodes map[proto.Message]node

	// a map of uninterpreted option AST nodes to their relative path
	// in the resulting options message
	interpretedOptions map[*optionNode][]int32
}
+
// The getXxxNode methods below recover the AST node that produced a given
// descriptor proto element. When no AST is available (r.nodes == nil, as for
// results built from the compiled-in standard imports), they return a
// noSourceNode placeholder that knows only the file name, so callers always
// get a usable source position.

// getFileNode uses the queried file's own name (not r.fd's) for the
// placeholder position, since the file descriptor is the thing being looked up.
func (r *parseResult) getFileNode(f *dpb.FileDescriptorProto) fileDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(f.GetName())}
	}
	return r.nodes[f].(fileDecl)
}

func (r *parseResult) getOptionNode(o *dpb.UninterpretedOption) optionDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[o].(optionDecl)
}

func (r *parseResult) getOptionNamePartNode(o *dpb.UninterpretedOption_NamePart) node {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[o]
}

func (r *parseResult) getMessageNode(m *dpb.DescriptorProto) msgDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[m].(msgDecl)
}

func (r *parseResult) getFieldNode(f *dpb.FieldDescriptorProto) fieldDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[f].(fieldDecl)
}

func (r *parseResult) getOneOfNode(o *dpb.OneofDescriptorProto) node {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[o]
}

func (r *parseResult) getExtensionRangeNode(e *dpb.DescriptorProto_ExtensionRange) rangeDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[e].(rangeDecl)
}

func (r *parseResult) getMessageReservedRangeNode(rr *dpb.DescriptorProto_ReservedRange) rangeDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[rr].(rangeDecl)
}

func (r *parseResult) getEnumNode(e *dpb.EnumDescriptorProto) node {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[e]
}

func (r *parseResult) getEnumValueNode(e *dpb.EnumValueDescriptorProto) enumValueDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[e].(enumValueDecl)
}

func (r *parseResult) getEnumReservedRangeNode(rr *dpb.EnumDescriptorProto_EnumReservedRange) rangeDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[rr].(rangeDecl)
}

func (r *parseResult) getServiceNode(s *dpb.ServiceDescriptorProto) node {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[s]
}

func (r *parseResult) getMethodNode(m *dpb.MethodDescriptorProto) methodDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[m].(methodDecl)
}
+
// The putXxxNode methods below record the mapping from a descriptor proto
// element to the AST node it was built from, for later position lookups by
// the getXxxNode methods. They assume r.nodes is non-nil (it is allocated in
// createParseResult); results for standard imports have nil nodes and never
// reach these methods.

func (r *parseResult) putFileNode(f *dpb.FileDescriptorProto, n *fileNode) {
	r.nodes[f] = n
}

func (r *parseResult) putOptionNode(o *dpb.UninterpretedOption, n *optionNode) {
	r.nodes[o] = n
}

func (r *parseResult) putOptionNamePartNode(o *dpb.UninterpretedOption_NamePart, n *optionNamePartNode) {
	r.nodes[o] = n
}

func (r *parseResult) putMessageNode(m *dpb.DescriptorProto, n msgDecl) {
	r.nodes[m] = n
}

func (r *parseResult) putFieldNode(f *dpb.FieldDescriptorProto, n fieldDecl) {
	r.nodes[f] = n
}

func (r *parseResult) putOneOfNode(o *dpb.OneofDescriptorProto, n *oneOfNode) {
	r.nodes[o] = n
}

func (r *parseResult) putExtensionRangeNode(e *dpb.DescriptorProto_ExtensionRange, n *rangeNode) {
	r.nodes[e] = n
}

func (r *parseResult) putMessageReservedRangeNode(rr *dpb.DescriptorProto_ReservedRange, n *rangeNode) {
	r.nodes[rr] = n
}

func (r *parseResult) putEnumNode(e *dpb.EnumDescriptorProto, n *enumNode) {
	r.nodes[e] = n
}

func (r *parseResult) putEnumValueNode(e *dpb.EnumValueDescriptorProto, n *enumValueNode) {
	r.nodes[e] = n
}

func (r *parseResult) putEnumReservedRangeNode(rr *dpb.EnumDescriptorProto_EnumReservedRange, n *rangeNode) {
	r.nodes[rr] = n
}

func (r *parseResult) putServiceNode(s *dpb.ServiceDescriptorProto, n *serviceNode) {
	r.nodes[s] = n
}

func (r *parseResult) putMethodNode(m *dpb.MethodDescriptorProto, n *methodNode) {
	r.nodes[m] = n
}
+
+func parseProto(filename string, r io.Reader, validate bool) (*parseResult, error) {
+	lx := newLexer(r)
+	lx.filename = filename
+	protoParse(lx)
+	if lx.err != nil {
+		if _, ok := lx.err.(ErrorWithSourcePos); ok {
+			return nil, lx.err
+		} else {
+			return nil, ErrorWithSourcePos{Pos: lx.prev(), Underlying: lx.err}
+		}
+	}
+	// parser will not return an error if input is empty, so we
+	// need to also check if the result is non-nil
+	if lx.res == nil {
+		return nil, ErrorWithSourcePos{Pos: lx.prev(), Underlying: errors.New("input is empty")}
+	}
+
+	res, err := createParseResult(filename, lx.res)
+	if err != nil {
+		return nil, err
+	}
+	if validate {
+		if err := basicValidate(res); err != nil {
+			return nil, err
+		}
+	}
+	return res, nil
+}
+
// createParseResult builds a parseResult for the given parsed file AST,
// allocating the node and interpreted-option maps and then populating the
// file descriptor proto from the AST. The returned result is non-nil even
// when an error is returned.
func createParseResult(filename string, file *fileNode) (*parseResult, error) {
	res := &parseResult{
		nodes:              map[proto.Message]node{},
		interpretedOptions: map[*optionNode][]int32{},
	}
	err := res.createFileDescriptor(filename, file)
	return res, err
}
+
// createFileDescriptor converts the parsed file AST into a
// dpb.FileDescriptorProto and stores it in r.fd. It walks every top-level
// declaration (enums, extend blocks, imports, messages, options, services,
// package) and records the AST node for each created element. It returns an
// error if more than one package declaration is present.
func (r *parseResult) createFileDescriptor(filename string, file *fileNode) error {
	fd := &dpb.FileDescriptorProto{Name: proto.String(filename)}
	r.putFileNode(fd, file)

	isProto3 := false
	if file.syntax != nil {
		isProto3 = file.syntax.syntax.val == "proto3"
		// proto2 is the default, so no need to set unless proto3
		if isProto3 {
			fd.Syntax = proto.String(file.syntax.syntax.val)
		}
	}

	for _, decl := range file.decls {
		if decl.enum != nil {
			fd.EnumType = append(fd.EnumType, r.asEnumDescriptor(decl.enum))
		} else if decl.extend != nil {
			r.addExtensions(decl.extend, &fd.Extension, &fd.MessageType, isProto3)
		} else if decl.imp != nil {
			file.imports = append(file.imports, decl.imp)
			// public/weak imports reference the dependency by its index
			index := len(fd.Dependency)
			fd.Dependency = append(fd.Dependency, decl.imp.name.val)
			if decl.imp.public {
				fd.PublicDependency = append(fd.PublicDependency, int32(index))
			} else if decl.imp.weak {
				fd.WeakDependency = append(fd.WeakDependency, int32(index))
			}
		} else if decl.message != nil {
			fd.MessageType = append(fd.MessageType, r.asMessageDescriptor(decl.message, isProto3))
		} else if decl.option != nil {
			if fd.Options == nil {
				fd.Options = &dpb.FileOptions{}
			}
			fd.Options.UninterpretedOption = append(fd.Options.UninterpretedOption, r.asUninterpretedOption(decl.option))
		} else if decl.service != nil {
			fd.Service = append(fd.Service, r.asServiceDescriptor(decl.service))
		} else if decl.pkg != nil {
			if fd.Package != nil {
				return ErrorWithSourcePos{Pos: decl.pkg.start(), Underlying: errors.New("files should have only one package declaration")}
			}
			file.pkg = decl.pkg
			fd.Package = proto.String(decl.pkg.name.val)
		}
	}
	r.fd = fd
	return nil
}
+
// asUninterpretedOptions converts a slice of option AST nodes into
// UninterpretedOption protos.
func (r *parseResult) asUninterpretedOptions(nodes []*optionNode) []*dpb.UninterpretedOption {
	opts := make([]*dpb.UninterpretedOption, len(nodes))
	for i, n := range nodes {
		opts[i] = r.asUninterpretedOption(n)
	}
	return opts
}
+
// asUninterpretedOption converts one option AST node into an
// UninterpretedOption proto, storing the parsed value in the field that
// matches its Go type: bools become the identifiers "true"/"false", message
// literals are rendered to their text-format string as AggregateValue.
func (r *parseResult) asUninterpretedOption(node *optionNode) *dpb.UninterpretedOption {
	opt := &dpb.UninterpretedOption{Name: r.asUninterpretedOptionName(node.name.parts)}
	r.putOptionNode(opt, node)

	switch val := node.val.value().(type) {
	case bool:
		if val {
			opt.IdentifierValue = proto.String("true")
		} else {
			opt.IdentifierValue = proto.String("false")
		}
	case int64:
		// int64 is produced for negative literals, uint64 for non-negative
		opt.NegativeIntValue = proto.Int64(val)
	case uint64:
		opt.PositiveIntValue = proto.Uint64(val)
	case float64:
		opt.DoubleValue = proto.Float64(val)
	case string:
		opt.StringValue = []byte(val)
	case identifier:
		opt.IdentifierValue = proto.String(string(val))
	case []*aggregateEntryNode:
		var buf bytes.Buffer
		aggToString(val, &buf)
		aggStr := buf.String()
		opt.AggregateValue = proto.String(aggStr)
	}
	return opt
}
+
// asUninterpretedOptionName converts parsed option name parts into
// UninterpretedOption_NamePart protos. For non-extension parts, the node's
// text token can contain an entire dotted name, so the part's own
// offset/length are used to slice out just this component.
func (r *parseResult) asUninterpretedOptionName(parts []*optionNamePartNode) []*dpb.UninterpretedOption_NamePart {
	ret := make([]*dpb.UninterpretedOption_NamePart, len(parts))
	for i, part := range parts {
		txt := part.text.val
		if !part.isExtension {
			txt = part.text.val[part.offset : part.offset+part.length]
		}
		np := &dpb.UninterpretedOption_NamePart{
			NamePart:    proto.String(txt),
			IsExtension: proto.Bool(part.isExtension),
		}
		r.putOptionNamePartNode(np, part)
		ret[i] = np
	}
	return ret
}
+
// addExtensions appends the extension fields declared in an extend block to
// flds (and, for group extensions, appends the group's message to msgs),
// setting the Extendee on each created field descriptor.
func (r *parseResult) addExtensions(ext *extendNode, flds *[]*dpb.FieldDescriptorProto, msgs *[]*dpb.DescriptorProto, isProto3 bool) {
	extendee := ext.extendee.val
	for _, decl := range ext.decls {
		if decl.field != nil {
			decl.field.extendee = ext
			fd := r.asFieldDescriptor(decl.field)
			fd.Extendee = proto.String(extendee)
			*flds = append(*flds, fd)
		} else if decl.group != nil {
			decl.group.extendee = ext
			fd, md := r.asGroupDescriptors(decl.group, isProto3)
			fd.Extendee = proto.String(extendee)
			*flds = append(*flds, fd)
			*msgs = append(*msgs, md)
		}
	}
}
+
+func asLabel(lbl *labelNode) *dpb.FieldDescriptorProto_Label {
+	if lbl == nil {
+		return nil
+	}
+	switch {
+	case lbl.repeated:
+		return dpb.FieldDescriptorProto_LABEL_REPEATED.Enum()
+	case lbl.required:
+		return dpb.FieldDescriptorProto_LABEL_REQUIRED.Enum()
+	default:
+		return dpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum()
+	}
+}
+
// asFieldDescriptor converts a parsed field declaration into a
// FieldDescriptorProto, attaching any field options as uninterpreted options.
func (r *parseResult) asFieldDescriptor(node *fieldNode) *dpb.FieldDescriptorProto {
	fd := newFieldDescriptor(node.name.val, node.fldType.val, int32(node.tag.val), asLabel(node.label))
	r.putFieldNode(fd, node)
	if len(node.options) > 0 {
		fd.Options = &dpb.FieldOptions{UninterpretedOption: r.asUninterpretedOptions(node.options)}
	}
	return fd
}
+
+func newFieldDescriptor(name string, fieldType string, tag int32, lbl *dpb.FieldDescriptorProto_Label) *dpb.FieldDescriptorProto {
+	fd := &dpb.FieldDescriptorProto{
+		Name:     proto.String(name),
+		JsonName: proto.String(internal.JsonName(name)),
+		Number:   proto.Int32(tag),
+		Label:    lbl,
+	}
+	switch fieldType {
+	case "double":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_DOUBLE.Enum()
+	case "float":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_FLOAT.Enum()
+	case "int32":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_INT32.Enum()
+	case "int64":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_INT64.Enum()
+	case "uint32":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_UINT32.Enum()
+	case "uint64":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_UINT64.Enum()
+	case "sint32":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_SINT32.Enum()
+	case "sint64":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_SINT64.Enum()
+	case "fixed32":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_FIXED32.Enum()
+	case "fixed64":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_FIXED64.Enum()
+	case "sfixed32":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_SFIXED32.Enum()
+	case "sfixed64":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_SFIXED64.Enum()
+	case "bool":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_BOOL.Enum()
+	case "string":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_STRING.Enum()
+	case "bytes":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_BYTES.Enum()
+	default:
+		// NB: we don't have enough info to determine whether this is an enum or a message type,
+		// so we'll change it to enum later once we can ascertain if it's an enum reference
+		fd.Type = dpb.FieldDescriptorProto_TYPE_MESSAGE.Enum()
+		fd.TypeName = proto.String(fieldType)
+	}
+	return fd
+}
+
// asGroupDescriptors converts a group declaration into its two descriptors:
// a field of type TYPE_GROUP whose name is the lowercased group name, and the
// nested message that holds the group's contents.
func (r *parseResult) asGroupDescriptors(group *groupNode, isProto3 bool) (*dpb.FieldDescriptorProto, *dpb.DescriptorProto) {
	fieldName := strings.ToLower(group.name.val)
	fd := &dpb.FieldDescriptorProto{
		Name:     proto.String(fieldName),
		JsonName: proto.String(internal.JsonName(fieldName)),
		Number:   proto.Int32(int32(group.tag.val)),
		Label:    asLabel(group.label),
		Type:     dpb.FieldDescriptorProto_TYPE_GROUP.Enum(),
		TypeName: proto.String(group.name.val),
	}
	r.putFieldNode(fd, group)
	md := &dpb.DescriptorProto{Name: proto.String(group.name.val)}
	r.putMessageNode(md, group)
	r.addMessageDecls(md, &group.reserved, group.decls, isProto3)
	return fd, md
}
+
// asMapDescriptors synthesizes the pair of descriptors that represent a map
// field: the repeated field itself plus the nested map-entry message (marked
// with the map_entry option, holding key field #1 and value field #2) named
// after the field ("FooBar" + "Entry"). In proto2 the synthetic key/value
// fields are explicitly optional; in proto3 they carry no label.
func (r *parseResult) asMapDescriptors(mapField *mapFieldNode, isProto3 bool) (*dpb.FieldDescriptorProto, *dpb.DescriptorProto) {
	var lbl *dpb.FieldDescriptorProto_Label
	if !isProto3 {
		lbl = dpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum()
	}
	keyFd := newFieldDescriptor("key", mapField.keyType.val, 1, lbl)
	r.putFieldNode(keyFd, mapField.keyField())
	valFd := newFieldDescriptor("value", mapField.valueType.val, 2, lbl)
	r.putFieldNode(valFd, mapField.valueField())
	entryName := internal.InitCap(internal.JsonName(mapField.name.val)) + "Entry"
	fd := newFieldDescriptor(mapField.name.val, entryName, int32(mapField.tag.val), dpb.FieldDescriptorProto_LABEL_REPEATED.Enum())
	if len(mapField.options) > 0 {
		fd.Options = &dpb.FieldOptions{UninterpretedOption: r.asUninterpretedOptions(mapField.options)}
	}
	r.putFieldNode(fd, mapField)
	md := &dpb.DescriptorProto{
		Name:    proto.String(entryName),
		Options: &dpb.MessageOptions{MapEntry: proto.Bool(true)},
		Field:   []*dpb.FieldDescriptorProto{keyFd, valFd},
	}
	r.putMessageNode(md, mapField)
	return fd, md
}
+
// asExtensionRanges converts one "extensions" declaration (which may list
// multiple ranges) into ExtensionRange protos. The descriptor's End is
// exclusive, hence the +1 on the parsed (inclusive) end. Options on the
// declaration are shared by every range it declares.
func (r *parseResult) asExtensionRanges(node *extensionRangeNode) []*dpb.DescriptorProto_ExtensionRange {
	opts := r.asUninterpretedOptions(node.options)
	ers := make([]*dpb.DescriptorProto_ExtensionRange, len(node.ranges))
	for i, rng := range node.ranges {
		er := &dpb.DescriptorProto_ExtensionRange{
			Start: proto.Int32(rng.st),
			End:   proto.Int32(rng.en + 1),
		}
		if len(opts) > 0 {
			er.Options = &dpb.ExtensionRangeOptions{UninterpretedOption: opts}
		}
		r.putExtensionRangeNode(er, rng)
		ers[i] = er
	}
	return ers
}
+
// asEnumValue converts an enum value declaration into an
// EnumValueDescriptorProto; the number comes from whichever literal node is
// set (positive numberP or negative numberN).
func (r *parseResult) asEnumValue(ev *enumValueNode) *dpb.EnumValueDescriptorProto {
	var num int32
	if ev.numberP != nil {
		num = int32(ev.numberP.val)
	} else {
		num = int32(ev.numberN.val)
	}
	evd := &dpb.EnumValueDescriptorProto{Name: proto.String(ev.name.val), Number: proto.Int32(num)}
	r.putEnumValueNode(evd, ev)
	if len(ev.options) > 0 {
		evd.Options = &dpb.EnumValueOptions{UninterpretedOption: r.asUninterpretedOptions(ev.options)}
	}
	return evd
}
+
// asMethodDescriptor converts an rpc declaration into a
// MethodDescriptorProto, setting client/server streaming flags when either
// side used the "stream" keyword.
func (r *parseResult) asMethodDescriptor(node *methodNode) *dpb.MethodDescriptorProto {
	md := &dpb.MethodDescriptorProto{
		Name:       proto.String(node.name.val),
		InputType:  proto.String(node.input.msgType.val),
		OutputType: proto.String(node.output.msgType.val),
	}
	r.putMethodNode(md, node)
	if node.input.streamKeyword != nil {
		md.ClientStreaming = proto.Bool(true)
	}
	if node.output.streamKeyword != nil {
		md.ServerStreaming = proto.Bool(true)
	}
	// protoc always adds a MethodOptions if there are brackets
	// We have a non-nil node.options if there are brackets
	// We do the same to match protoc as closely as possible
	// https://github.com/protocolbuffers/protobuf/blob/0c3f43a6190b77f1f68b7425d1b7e1a8257a8d0c/src/google/protobuf/compiler/parser.cc#L2152
	if node.options != nil {
		md.Options = &dpb.MethodOptions{UninterpretedOption: r.asUninterpretedOptions(node.options)}
	}
	return md
}
+
// asEnumDescriptor converts an enum declaration into an EnumDescriptorProto,
// collecting its options, values, and reserved names/ranges. Reserved name
// nodes are also accumulated on the AST node for later validation.
func (r *parseResult) asEnumDescriptor(en *enumNode) *dpb.EnumDescriptorProto {
	ed := &dpb.EnumDescriptorProto{Name: proto.String(en.name.val)}
	r.putEnumNode(ed, en)
	for _, decl := range en.decls {
		if decl.option != nil {
			if ed.Options == nil {
				ed.Options = &dpb.EnumOptions{}
			}
			ed.Options.UninterpretedOption = append(ed.Options.UninterpretedOption, r.asUninterpretedOption(decl.option))
		} else if decl.value != nil {
			ed.Value = append(ed.Value, r.asEnumValue(decl.value))
		} else if decl.reserved != nil {
			for _, n := range decl.reserved.names {
				en.reserved = append(en.reserved, n)
				ed.ReservedName = append(ed.ReservedName, n.val)
			}
			for _, rng := range decl.reserved.ranges {
				ed.ReservedRange = append(ed.ReservedRange, r.asEnumReservedRange(rng))
			}
		}
	}
	return ed
}
+
// asEnumReservedRange converts a reserved range AST node into an
// EnumDescriptorProto_EnumReservedRange. Note there is no +1 adjustment here
// (unlike asMessageReservedRange): enum reserved range ends are inclusive in
// descriptor.proto.
func (r *parseResult) asEnumReservedRange(rng *rangeNode) *dpb.EnumDescriptorProto_EnumReservedRange {
	rr := &dpb.EnumDescriptorProto_EnumReservedRange{
		Start: proto.Int32(rng.st),
		End:   proto.Int32(rng.en),
	}
	r.putEnumReservedRangeNode(rr, rng)
	return rr
}
+
// asMessageDescriptor converts a message declaration into a DescriptorProto,
// delegating the body declarations to addMessageDecls.
func (r *parseResult) asMessageDescriptor(node *messageNode, isProto3 bool) *dpb.DescriptorProto {
	msgd := &dpb.DescriptorProto{Name: proto.String(node.name.val)}
	r.putMessageNode(msgd, node)
	r.addMessageDecls(msgd, &node.reserved, node.decls, isProto3)
	return msgd
}
+
// addMessageDecls populates msgd from a message body's declarations: nested
// enums/messages/groups, extend blocks, extension ranges, fields (including
// synthesized map-entry messages), oneofs (whose member fields are added to
// the message with the proper OneofIndex), options, and reserved
// names/ranges. Reserved name nodes are also appended to reservedNames for
// later validation.
func (r *parseResult) addMessageDecls(msgd *dpb.DescriptorProto, reservedNames *[]*stringLiteralNode, decls []*messageElement, isProto3 bool) {
	for _, decl := range decls {
		if decl.enum != nil {
			msgd.EnumType = append(msgd.EnumType, r.asEnumDescriptor(decl.enum))
		} else if decl.extend != nil {
			r.addExtensions(decl.extend, &msgd.Extension, &msgd.NestedType, isProto3)
		} else if decl.extensionRange != nil {
			msgd.ExtensionRange = append(msgd.ExtensionRange, r.asExtensionRanges(decl.extensionRange)...)
		} else if decl.field != nil {
			msgd.Field = append(msgd.Field, r.asFieldDescriptor(decl.field))
		} else if decl.mapField != nil {
			fd, md := r.asMapDescriptors(decl.mapField, isProto3)
			msgd.Field = append(msgd.Field, fd)
			msgd.NestedType = append(msgd.NestedType, md)
		} else if decl.group != nil {
			fd, md := r.asGroupDescriptors(decl.group, isProto3)
			msgd.Field = append(msgd.Field, fd)
			msgd.NestedType = append(msgd.NestedType, md)
		} else if decl.oneOf != nil {
			// oneof members are regular fields of the enclosing message that
			// point back to the oneof via OneofIndex
			oodIndex := len(msgd.OneofDecl)
			ood := &dpb.OneofDescriptorProto{Name: proto.String(decl.oneOf.name.val)}
			r.putOneOfNode(ood, decl.oneOf)
			msgd.OneofDecl = append(msgd.OneofDecl, ood)
			for _, oodecl := range decl.oneOf.decls {
				if oodecl.option != nil {
					if ood.Options == nil {
						ood.Options = &dpb.OneofOptions{}
					}
					ood.Options.UninterpretedOption = append(ood.Options.UninterpretedOption, r.asUninterpretedOption(oodecl.option))
				} else if oodecl.field != nil {
					fd := r.asFieldDescriptor(oodecl.field)
					fd.OneofIndex = proto.Int32(int32(oodIndex))
					msgd.Field = append(msgd.Field, fd)
				}
			}
		} else if decl.option != nil {
			if msgd.Options == nil {
				msgd.Options = &dpb.MessageOptions{}
			}
			msgd.Options.UninterpretedOption = append(msgd.Options.UninterpretedOption, r.asUninterpretedOption(decl.option))
		} else if decl.nested != nil {
			msgd.NestedType = append(msgd.NestedType, r.asMessageDescriptor(decl.nested, isProto3))
		} else if decl.reserved != nil {
			for _, n := range decl.reserved.names {
				*reservedNames = append(*reservedNames, n)
				msgd.ReservedName = append(msgd.ReservedName, n.val)
			}
			for _, rng := range decl.reserved.ranges {
				msgd.ReservedRange = append(msgd.ReservedRange, r.asMessageReservedRange(rng))
			}
		}
	}
}
+
// asMessageReservedRange converts a reserved range AST node into a
// DescriptorProto_ReservedRange; message reserved range ends are exclusive in
// descriptor.proto, hence the +1 on the parsed (inclusive) end.
func (r *parseResult) asMessageReservedRange(rng *rangeNode) *dpb.DescriptorProto_ReservedRange {
	rr := &dpb.DescriptorProto_ReservedRange{
		Start: proto.Int32(rng.st),
		End:   proto.Int32(rng.en + 1),
	}
	r.putMessageReservedRangeNode(rr, rng)
	return rr
}
+
// asServiceDescriptor converts a service declaration into a
// ServiceDescriptorProto, collecting its options and rpc methods.
func (r *parseResult) asServiceDescriptor(svc *serviceNode) *dpb.ServiceDescriptorProto {
	sd := &dpb.ServiceDescriptorProto{Name: proto.String(svc.name.val)}
	r.putServiceNode(sd, svc)
	for _, decl := range svc.decls {
		if decl.option != nil {
			if sd.Options == nil {
				sd.Options = &dpb.ServiceOptions{}
			}
			sd.Options.UninterpretedOption = append(sd.Options.UninterpretedOption, r.asUninterpretedOption(decl.option))
		} else if decl.rpc != nil {
			sd.Method = append(sd.Method, r.asMethodDescriptor(decl.rpc))
		}
	}
	return sd
}
+
// toNameParts splits the dotted identifier in ident (starting at offset) into
// one optionNamePartNode per dot-separated component. Every part shares the
// original ident token, recording its own offset and length within it.
func toNameParts(ident *identNode, offset int) []*optionNamePartNode {
	parts := strings.Split(ident.val[offset:], ".")
	ret := make([]*optionNamePartNode, len(parts))
	for i, p := range parts {
		ret[i] = &optionNamePartNode{text: ident, offset: offset, length: len(p)}
		ret[i].setRange(ident, ident)
		offset += len(p) + 1 // advance past this part plus its trailing dot
	}
	return ret
}
+
// checkUint64InInt32Range reports a lexer error at pos if v overflows int32.
func checkUint64InInt32Range(lex protoLexer, pos *SourcePos, v uint64) {
	if v > math.MaxInt32 {
		lexError(lex, pos, fmt.Sprintf("constant %d is out of range for int32 (%d to %d)", v, math.MinInt32, math.MaxInt32))
	}
}

// checkInt64InInt32Range reports a lexer error at pos if v does not fit in an int32.
func checkInt64InInt32Range(lex protoLexer, pos *SourcePos, v int64) {
	if v > math.MaxInt32 || v < math.MinInt32 {
		lexError(lex, pos, fmt.Sprintf("constant %d is out of range for int32 (%d to %d)", v, math.MinInt32, math.MaxInt32))
	}
}

// checkTag reports a lexer error at pos if v is not a usable field tag: it
// must not exceed internal.MaxTag and must not fall inside the reserved range
// [internal.SpecialReservedStart, internal.SpecialReservedEnd].
func checkTag(lex protoLexer, pos *SourcePos, v uint64) {
	if v > internal.MaxTag {
		lexError(lex, pos, fmt.Sprintf("tag number %d is higher than max allowed tag number (%d)", v, internal.MaxTag))
	} else if v >= internal.SpecialReservedStart && v <= internal.SpecialReservedEnd {
		lexError(lex, pos, fmt.Sprintf("tag number %d is in disallowed reserved range %d-%d", v, internal.SpecialReservedStart, internal.SpecialReservedEnd))
	}
}
+
// aggToString renders a message literal (aggregate option value) to buf in
// protobuf text format, as "{ name: value name2 { ... } }". Entries whose
// value is itself a message literal recurse directly (no ':' separator);
// all other values are delegated to elementToString.
func aggToString(agg []*aggregateEntryNode, buf *bytes.Buffer) {
	buf.WriteString("{")
	for _, a := range agg {
		buf.WriteString(" ")
		buf.WriteString(a.name.value())
		if v, ok := a.val.(*aggregateLiteralNode); ok {
			aggToString(v.elements, buf)
		} else {
			buf.WriteString(": ")
			elementToString(a.val.value(), buf)
		}
	}
	buf.WriteString(" }")
}
+
+func elementToString(v interface{}, buf *bytes.Buffer) {
+	switch v := v.(type) {
+	case bool, int64, uint64, identifier:
+		fmt.Fprintf(buf, "%v", v)
+	case float64:
+		if math.IsInf(v, 1) {
+			buf.WriteString(": inf")
+		} else if math.IsInf(v, -1) {
+			buf.WriteString(": -inf")
+		} else if math.IsNaN(v) {
+			buf.WriteString(": nan")
+		} else {
+			fmt.Fprintf(buf, ": %v", v)
+		}
+	case string:
+		buf.WriteRune('"')
+		writeEscapedBytes(buf, []byte(v))
+		buf.WriteRune('"')
+	case []valueNode:
+		buf.WriteString(": [")
+		first := true
+		for _, e := range v {
+			if first {
+				first = false
+			} else {
+				buf.WriteString(", ")
+			}
+			elementToString(e.value(), buf)
+		}
+		buf.WriteString("]")
+	case []*aggregateEntryNode:
+		aggToString(v, buf)
+	}
+}
+
+func writeEscapedBytes(buf *bytes.Buffer, b []byte) {
+	for _, c := range b {
+		switch c {
+		case '\n':
+			buf.WriteString("\\n")
+		case '\r':
+			buf.WriteString("\\r")
+		case '\t':
+			buf.WriteString("\\t")
+		case '"':
+			buf.WriteString("\\\"")
+		case '\'':
+			buf.WriteString("\\'")
+		case '\\':
+			buf.WriteString("\\\\")
+		default:
+			if c >= 0x20 && c <= 0x7f && c != '"' && c != '\\' {
+				// simple printable characters
+				buf.WriteByte(c)
+			} else {
+				// use octal escape for all other values
+				buf.WriteRune('\\')
+				buf.WriteByte('0' + ((c >> 6) & 0x7))
+				buf.WriteByte('0' + ((c >> 3) & 0x7))
+				buf.WriteByte('0' + (c & 0x7))
+			}
+		}
+	}
+}
+
// basicValidate performs the stand-alone (pre-linking) validation of a parsed
// file: every top-level message, enum, and extension field is checked with
// the corresponding validate* helper, stopping at the first error.
func basicValidate(res *parseResult) error {
	fd := res.fd
	isProto3 := fd.GetSyntax() == "proto3"

	for _, md := range fd.MessageType {
		if err := validateMessage(res, isProto3, "", md); err != nil {
			return err
		}
	}

	for _, ed := range fd.EnumType {
		if err := validateEnum(res, isProto3, "", ed); err != nil {
			return err
		}
	}

	for _, fld := range fd.Extension {
		if err := validateField(res, isProto3, "", fld); err != nil {
			return err
		}
	}
	return nil
}
+
// validateMessage performs basic (pre-linking) checks on a message: it
// recursively validates nested elements, rejects extension ranges in proto3,
// interprets (and removes) an explicit map_entry option, and verifies that
// reserved ranges, extension ranges, field tags, and reserved names are
// mutually consistent. prefix is the enclosing dotted scope used in error
// messages.
func validateMessage(res *parseResult, isProto3 bool, prefix string, md *dpb.DescriptorProto) error {
	nextPrefix := md.GetName() + "."

	for _, fld := range md.Field {
		if err := validateField(res, isProto3, nextPrefix, fld); err != nil {
			return err
		}
	}
	for _, fld := range md.Extension {
		if err := validateField(res, isProto3, nextPrefix, fld); err != nil {
			return err
		}
	}
	for _, ed := range md.EnumType {
		if err := validateEnum(res, isProto3, nextPrefix, ed); err != nil {
			return err
		}
	}
	for _, nmd := range md.NestedType {
		if err := validateMessage(res, isProto3, nextPrefix, nmd); err != nil {
			return err
		}
	}

	scope := fmt.Sprintf("message %s%s", prefix, md.GetName())

	if isProto3 && len(md.ExtensionRange) > 0 {
		n := res.getExtensionRangeNode(md.ExtensionRange[0])
		return ErrorWithSourcePos{Pos: n.start(), Underlying: fmt.Errorf("%s: extension ranges are not allowed in proto3", scope)}
	}

	// an explicit map_entry option is only legal as "false"; "true" must come
	// from using the map type syntax. The uninterpreted option is consumed
	// (removed) here either way.
	if index, err := findOption(res, scope, md.Options.GetUninterpretedOption(), "map_entry"); err != nil {
		return err
	} else if index >= 0 {
		opt := md.Options.UninterpretedOption[index]
		optn := res.getOptionNode(opt)
		md.Options.UninterpretedOption = removeOption(md.Options.UninterpretedOption, index)
		valid := false
		if opt.IdentifierValue != nil {
			if opt.GetIdentifierValue() == "true" {
				return ErrorWithSourcePos{Pos: optn.getValue().start(), Underlying: fmt.Errorf("%s: map_entry option should not be set explicitly; use map type instead", scope)}
			} else if opt.GetIdentifierValue() == "false" {
				md.Options.MapEntry = proto.Bool(false)
				valid = true
			}
		}
		if !valid {
			return ErrorWithSourcePos{Pos: optn.getValue().start(), Underlying: fmt.Errorf("%s: expecting bool value for map_entry option", scope)}
		}
	}

	// reserved ranges should not overlap
	rsvd := make(tagRanges, len(md.ReservedRange))
	for i, r := range md.ReservedRange {
		n := res.getMessageReservedRangeNode(r)
		rsvd[i] = tagRange{start: r.GetStart(), end: r.GetEnd(), node: n}

	}
	sort.Sort(rsvd)
	for i := 1; i < len(rsvd); i++ {
		if rsvd[i].start < rsvd[i-1].end {
			return ErrorWithSourcePos{Pos: rsvd[i].node.start(), Underlying: fmt.Errorf("%s: reserved ranges overlap: %d to %d and %d to %d", scope, rsvd[i-1].start, rsvd[i-1].end-1, rsvd[i].start, rsvd[i].end-1)}
		}
	}

	// extensions ranges should not overlap
	exts := make(tagRanges, len(md.ExtensionRange))
	for i, r := range md.ExtensionRange {
		n := res.getExtensionRangeNode(r)
		exts[i] = tagRange{start: r.GetStart(), end: r.GetEnd(), node: n}
	}
	sort.Sort(exts)
	for i := 1; i < len(exts); i++ {
		if exts[i].start < exts[i-1].end {
			return ErrorWithSourcePos{Pos: exts[i].node.start(), Underlying: fmt.Errorf("%s: extension ranges overlap: %d to %d and %d to %d", scope, exts[i-1].start, exts[i-1].end-1, exts[i].start, exts[i].end-1)}
		}
	}

	// see if any extension range overlaps any reserved range
	// (both lists are sorted, so this is a linear merge-style sweep)
	var i, j int // i indexes rsvd; j indexes exts
	for i < len(rsvd) && j < len(exts) {
		if rsvd[i].start >= exts[j].start && rsvd[i].start < exts[j].end ||
			exts[j].start >= rsvd[i].start && exts[j].start < rsvd[i].end {

			// report the position of whichever range starts second
			var pos *SourcePos
			if rsvd[i].start >= exts[j].start && rsvd[i].start < exts[j].end {
				pos = rsvd[i].node.start()
			} else {
				pos = exts[j].node.start()
			}
			// ranges overlap
			return ErrorWithSourcePos{Pos: pos, Underlying: fmt.Errorf("%s: extension range %d to %d overlaps reserved range %d to %d", scope, exts[j].start, exts[j].end-1, rsvd[i].start, rsvd[i].end-1)}
		}
		if rsvd[i].start < exts[j].start {
			i++
		} else {
			j++
		}
	}

	// now, check that fields don't re-use tags and don't try to use extension
	// or reserved ranges or reserved names
	rsvdNames := map[string]struct{}{}
	for _, n := range md.ReservedName {
		rsvdNames[n] = struct{}{}
	}
	fieldTags := map[int32]string{}
	for _, fld := range md.Field {
		fn := res.getFieldNode(fld)
		if _, ok := rsvdNames[fld.GetName()]; ok {
			return ErrorWithSourcePos{Pos: fn.fieldName().start(), Underlying: fmt.Errorf("%s: field %s is using a reserved name", scope, fld.GetName())}
		}
		if existing := fieldTags[fld.GetNumber()]; existing != "" {
			return ErrorWithSourcePos{Pos: fn.fieldTag().start(), Underlying: fmt.Errorf("%s: fields %s and %s both have the same tag %d", scope, existing, fld.GetName(), fld.GetNumber())}
		}
		fieldTags[fld.GetNumber()] = fld.GetName()
		// check reserved ranges (binary search over the sorted ranges)
		r := sort.Search(len(rsvd), func(index int) bool { return rsvd[index].end > fld.GetNumber() })
		if r < len(rsvd) && rsvd[r].start <= fld.GetNumber() {
			return ErrorWithSourcePos{Pos: fn.fieldTag().start(), Underlying: fmt.Errorf("%s: field %s is using tag %d which is in reserved range %d to %d", scope, fld.GetName(), fld.GetNumber(), rsvd[r].start, rsvd[r].end-1)}
		}
		// and check extension ranges
		e := sort.Search(len(exts), func(index int) bool { return exts[index].end > fld.GetNumber() })
		if e < len(exts) && exts[e].start <= fld.GetNumber() {
			return ErrorWithSourcePos{Pos: fn.fieldTag().start(), Underlying: fmt.Errorf("%s: field %s is using tag %d which is in extension range %d to %d", scope, fld.GetName(), fld.GetNumber(), exts[e].start, exts[e].end-1)}
		}
	}

	return nil
}
+
+// validateEnum checks an enum declaration for correctness: it interprets and
+// strips the allow_alias option, enforces proto3's requirement that the first
+// value be zero, rejects duplicate numeric values (unless aliasing is
+// allowed), and verifies that reserved ranges do not overlap and that no
+// value uses a reserved number or name. It returns an ErrorWithSourcePos
+// describing the first problem found, or nil if the enum is valid.
+func validateEnum(res *parseResult, isProto3 bool, prefix string, ed *dpb.EnumDescriptorProto) error {
+	scope := fmt.Sprintf("enum %s%s", prefix, ed.GetName())
+
+	// allow_alias must be interpreted eagerly, before the other checks,
+	// because the duplicate-value check below depends on it; once
+	// recognized, it is removed from the uninterpreted options.
+	if index, err := findOption(res, scope, ed.Options.GetUninterpretedOption(), "allow_alias"); err != nil {
+		return err
+	} else if index >= 0 {
+		opt := ed.Options.UninterpretedOption[index]
+		ed.Options.UninterpretedOption = removeOption(ed.Options.UninterpretedOption, index)
+		valid := false
+		if opt.IdentifierValue != nil {
+			// only the bare identifiers "true" and "false" are accepted
+			if opt.GetIdentifierValue() == "true" {
+				ed.Options.AllowAlias = proto.Bool(true)
+				valid = true
+			} else if opt.GetIdentifierValue() == "false" {
+				ed.Options.AllowAlias = proto.Bool(false)
+				valid = true
+			}
+		}
+		if !valid {
+			optNode := res.getOptionNode(opt)
+			return ErrorWithSourcePos{Pos: optNode.getValue().start(), Underlying: fmt.Errorf("%s: expecting bool value for allow_alias option", scope)}
+		}
+	}
+
+	// NOTE(review): assumes ed.Value is non-empty; the grammar requires at
+	// least one value per enum, but a synthesized descriptor with no
+	// values would panic here — confirm callers only pass parsed enums.
+	if isProto3 && ed.Value[0].GetNumber() != 0 {
+		evNode := res.getEnumValueNode(ed.Value[0])
+		return ErrorWithSourcePos{Pos: evNode.getNumber().start(), Underlying: fmt.Errorf("%s: proto3 requires that first value in enum have numeric value of 0", scope)}
+	}
+
+	if !ed.Options.GetAllowAlias() {
+		// make sure all value numbers are distinct
+		vals := map[int32]string{}
+		for _, evd := range ed.Value {
+			if existing := vals[evd.GetNumber()]; existing != "" {
+				evNode := res.getEnumValueNode(evd)
+				return ErrorWithSourcePos{Pos: evNode.getNumber().start(), Underlying: fmt.Errorf("%s: values %s and %s both have the same numeric value %d; use allow_alias option if intentional", scope, existing, evd.GetName(), evd.GetNumber())}
+			}
+			vals[evd.GetNumber()] = evd.GetName()
+		}
+	}
+
+	// reserved ranges should not overlap; sorting lets a single scan of
+	// adjacent pairs detect overlap. Enum reserved-range ends are
+	// inclusive, hence the <= comparison and errors printing end as-is.
+	rsvd := make(tagRanges, len(ed.ReservedRange))
+	for i, r := range ed.ReservedRange {
+		n := res.getEnumReservedRangeNode(r)
+		rsvd[i] = tagRange{start: r.GetStart(), end: r.GetEnd(), node: n}
+	}
+	sort.Sort(rsvd)
+	for i := 1; i < len(rsvd); i++ {
+		if rsvd[i].start <= rsvd[i-1].end {
+			return ErrorWithSourcePos{Pos: rsvd[i].node.start(), Underlying: fmt.Errorf("%s: reserved ranges overlap: %d to %d and %d to %d", scope, rsvd[i-1].start, rsvd[i-1].end, rsvd[i].start, rsvd[i].end)}
+		}
+	}
+
+	// now, check that values don't use reserved numbers or reserved names
+	rsvdNames := map[string]struct{}{}
+	for _, n := range ed.ReservedName {
+		rsvdNames[n] = struct{}{}
+	}
+	for _, ev := range ed.Value {
+		evn := res.getEnumValueNode(ev)
+		if _, ok := rsvdNames[ev.GetName()]; ok {
+			return ErrorWithSourcePos{Pos: evn.getName().start(), Underlying: fmt.Errorf("%s: value %s is using a reserved name", scope, ev.GetName())}
+		}
+		// check reserved ranges: binary-search (on the sorted slice) for
+		// the first range whose inclusive end is >= this value's number,
+		// then test whether that range's start covers it
+		r := sort.Search(len(rsvd), func(index int) bool { return rsvd[index].end >= ev.GetNumber() })
+		if r < len(rsvd) && rsvd[r].start <= ev.GetNumber() {
+			return ErrorWithSourcePos{Pos: evn.getNumber().start(), Underlying: fmt.Errorf("%s: value %s is using number %d which is in reserved range %d to %d", scope, ev.GetName(), ev.GetNumber(), rsvd[r].start, rsvd[r].end)}
+		}
+	}
+
+	return nil
+}
+
+// validateField checks a single field declaration against the rules of its
+// syntax level. For proto3 it rejects groups, explicit labels other than
+// 'repeated', and 'default' options; for proto2 it requires a label on
+// non-oneof fields and forbids 'required' on extension fields. As a side
+// effect, a field with no label is normalized to LABEL_OPTIONAL before
+// returning. Returns an ErrorWithSourcePos for the first violation, or nil.
+func validateField(res *parseResult, isProto3 bool, prefix string, fld *dpb.FieldDescriptorProto) error {
+	scope := fmt.Sprintf("field %s%s", prefix, fld.GetName())
+
+	node := res.getFieldNode(fld)
+	if isProto3 {
+		if fld.GetType() == dpb.FieldDescriptorProto_TYPE_GROUP {
+			// a TYPE_GROUP field is always backed by a *groupNode, so
+			// this assertion cannot fail here
+			n := node.(*groupNode)
+			return ErrorWithSourcePos{Pos: n.groupKeyword.start(), Underlying: fmt.Errorf("%s: groups are not allowed in proto3", scope)}
+		}
+		if fld.Label != nil && fld.GetLabel() != dpb.FieldDescriptorProto_LABEL_REPEATED {
+			return ErrorWithSourcePos{Pos: node.fieldLabel().start(), Underlying: fmt.Errorf("%s: field has label %v, but proto3 should omit labels other than 'repeated'", scope, fld.GetLabel())}
+		}
+		if index, err := findOption(res, scope, fld.Options.GetUninterpretedOption(), "default"); err != nil {
+			return err
+		} else if index >= 0 {
+			optNode := res.getOptionNode(fld.Options.GetUninterpretedOption()[index])
+			return ErrorWithSourcePos{Pos: optNode.getName().start(), Underlying: fmt.Errorf("%s: default values are not allowed in proto3", scope)}
+		}
+	} else {
+		// proto2: oneof members are the only fields allowed to omit a label
+		if fld.Label == nil && fld.OneofIndex == nil {
+			return ErrorWithSourcePos{Pos: node.fieldName().start(), Underlying: fmt.Errorf("%s: field has no label, but proto2 must indicate 'optional' or 'required'", scope)}
+		}
+		if fld.GetExtendee() != "" && fld.Label != nil && fld.GetLabel() == dpb.FieldDescriptorProto_LABEL_REQUIRED {
+			return ErrorWithSourcePos{Pos: node.fieldLabel().start(), Underlying: fmt.Errorf("%s: extension fields cannot be 'required'", scope)}
+		}
+	}
+
+	// finally, set any missing label to optional
+	if fld.Label == nil {
+		fld.Label = dpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum()
+	}
+	return nil
+}
+
+// findOption scans opts for a simple (single-part, non-extension) option with
+// the given name. It returns the index of the sole occurrence, or -1 if the
+// option is not present. If the option appears more than once, it returns an
+// ErrorWithSourcePos positioned at the duplicate occurrence's name; scope is
+// used only to prefix that error message.
+func findOption(res *parseResult, scope string, opts []*dpb.UninterpretedOption, name string) (int, error) {
+	found := -1
+	for i, opt := range opts {
+		if len(opt.Name) != 1 {
+			// a multi-part name can never match a simple option name
+			continue
+		}
+		if opt.Name[0].GetIsExtension() || opt.Name[0].GetNamePart() != name {
+			continue
+		}
+		if found >= 0 {
+			// second match: the option is defined more than once
+			optNode := res.getOptionNode(opt)
+			return -1, ErrorWithSourcePos{Pos: optNode.getName().start(), Underlying: fmt.Errorf("%s: option %s cannot be defined more than once", scope, name)}
+		}
+		found = i
+	}
+	return found, nil
+}
+
+// removeOption returns uo with the element at indexToRemove removed. The
+// first- and last-element cases are handled by cheap re-slicing; removal
+// from the middle shifts the tail down via append.
+// NOTE(review): the append case mutates uo's backing array in place, so any
+// other slice aliasing the original storage will observe shifted elements;
+// callers in this file immediately replace the original slice with the
+// returned one, which appears to make this safe in practice — confirm no
+// other references to the original slice are retained.
+func removeOption(uo []*dpb.UninterpretedOption, indexToRemove int) []*dpb.UninterpretedOption {
+	if indexToRemove == 0 {
+		return uo[1:]
+	} else if int(indexToRemove) == len(uo)-1 {
+		return uo[:len(uo)-1]
+	} else {
+		return append(uo[:indexToRemove], uo[indexToRemove+1:]...)
+	}
+}
+
+// tagRange is a numeric range of field tags (or enum value numbers) paired
+// with the AST node it came from, so that overlap errors can point at the
+// originating source location. Whether end is inclusive or exclusive is
+// determined by the caller: message extension/reserved ranges store an
+// exclusive end (their error messages print end-1), while enum reserved
+// ranges store an inclusive end.
+type tagRange struct {
+	start int32
+	end   int32
+	node  rangeDecl
+}
+
+// tagRanges implements sort.Interface, ordering ranges by start and then by
+// end, so that after sorting, overlap detection only needs to compare
+// adjacent elements.
+type tagRanges []tagRange
+
+// Len returns the number of ranges.
+func (r tagRanges) Len() int {
+	return len(r)
+}
+
+// Less orders ranges by start, breaking ties by end.
+func (r tagRanges) Less(i, j int) bool {
+	return r[i].start < r[j].start ||
+		(r[i].start == r[j].start && r[i].end < r[j].end)
+}
+
+// Swap exchanges the ranges at indexes i and j.
+func (r tagRanges) Swap(i, j int) {
+	r[i], r[j] = r[j], r[i]
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y b/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y
new file mode 100644
index 0000000..faf49d9
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y
@@ -0,0 +1,937 @@
+%{
+package protoparse
+
+//lint:file-ignore SA4006 generated parser has unused values
+
+import (
+	"fmt"
+	"math"
+	"unicode"
+
+	"github.com/jhump/protoreflect/desc/internal"
+)
+
+%}
+
+// fields inside this union end up as the fields in a structure known
+// as ${PREFIX}SymType, of which a reference is passed to the lexer.
+%union{
+	file      *fileNode
+	fileDecls []*fileElement
+	syn       *syntaxNode
+	pkg       *packageNode
+	imprt     *importNode
+	msg       *messageNode
+	msgDecls  []*messageElement
+	fld       *fieldNode
+	mapFld    *mapFieldNode
+	grp       *groupNode
+	oo        *oneOfNode
+	ooDecls   []*oneOfElement
+	ext       *extensionRangeNode
+	resvd     *reservedNode
+	en        *enumNode
+	enDecls   []*enumElement
+	env       *enumValueNode
+	extend    *extendNode
+	extDecls  []*extendElement
+	svc       *serviceNode
+	svcDecls  []*serviceElement
+	mtd       *methodNode
+	rpcType   *rpcTypeNode
+	opts      []*optionNode
+	optNm     []*optionNamePartNode
+	rngs      []*rangeNode
+	names     []*stringLiteralNode
+	sl        []valueNode
+	agg       []*aggregateEntryNode
+	aggName   *aggregateNameNode
+	v         valueNode
+	str       *stringLiteralNode
+	i         *negativeIntLiteralNode
+	ui        *intLiteralNode
+	f         *floatLiteralNode
+	id        *identNode
+	b         *basicNode
+	err       error
+}
+
+// any non-terminal which returns a value needs a type, which is
+// really a field name in the above union struct
+%type <file>      file
+%type <syn>       syntax
+%type <fileDecls> fileDecl fileDecls
+%type <imprt>     import
+%type <pkg>       package
+%type <opts>      option fieldOption fieldOptions rpcOption rpcOptions
+%type <optNm>     optionName optionNameRest optionNameComponent
+%type <v>         constant scalarConstant aggregate
+%type <id>        name ident typeIdent keyType
+%type <aggName>   aggName
+%type <i>         negIntLit
+%type <ui>        intLit
+%type <f>         floatLit
+%type <sl>        constantList
+%type <agg>       aggFields aggField aggFieldEntry
+%type <fld>       field oneofField
+%type <oo>        oneof
+%type <grp>       group
+%type <mapFld>    mapField
+%type <msg>       message
+%type <msgDecls>  messageItem messageBody
+%type <ooDecls>   oneofItem oneofBody
+%type <names>     fieldNames
+%type <resvd>     msgReserved enumReserved reservedNames
+%type <rngs>      tagRange tagRanges enumRange enumRanges
+%type <ext>       extensions
+%type <en>        enum
+%type <enDecls>   enumItem enumBody
+%type <env>       enumField
+%type <extend>    extend
+%type <extDecls>  extendItem extendBody
+%type <str>       stringLit
+%type <svc>       service
+%type <svcDecls>  serviceItem serviceBody
+%type <mtd>       rpc
+%type <rpcType>   rpcType
+
+// same for terminals
+%token <str> _STRING_LIT
+%token <ui>  _INT_LIT
+%token <f>   _FLOAT_LIT
+%token <id>  _NAME _FQNAME _TYPENAME
+%token <id>  _SYNTAX _IMPORT _WEAK _PUBLIC _PACKAGE _OPTION _TRUE _FALSE _INF _NAN _REPEATED _OPTIONAL _REQUIRED
+%token <id>  _DOUBLE _FLOAT _INT32 _INT64 _UINT32 _UINT64 _SINT32 _SINT64 _FIXED32 _FIXED64 _SFIXED32 _SFIXED64
+%token <id>  _BOOL _STRING _BYTES _GROUP _ONEOF _MAP _EXTENSIONS _TO _MAX _RESERVED _ENUM _MESSAGE _EXTEND
+%token <id>  _SERVICE _RPC _STREAM _RETURNS
+%token <err> _ERROR
+// we define all of these, even ones that aren't used, to improve error messages
+// so it shows the unexpected symbol instead of showing "$unk"
+%token <b>   '=' ';' ':' '{' '}' '\\' '/' '?' '.' ',' '>' '<' '+' '-' '(' ')' '[' ']' '*' '&' '^' '%' '$' '#' '@' '!' '~' '`'
+
+%%
+
+file : syntax {
+		$$ = &fileNode{syntax: $1}
+		$$.setRange($1, $1)
+		protolex.(*protoLex).res = $$
+	}
+	| fileDecls  {
+		$$ = &fileNode{decls: $1}
+		if len($1) > 0 {
+			$$.setRange($1[0], $1[len($1)-1])
+		}
+		protolex.(*protoLex).res = $$
+	}
+	| syntax fileDecls {
+		$$ = &fileNode{syntax: $1, decls: $2}
+		var end node
+		if len($2) > 0 {
+			end = $2[len($2)-1]
+		} else {
+			end = $1
+		}
+		$$.setRange($1, end)
+		protolex.(*protoLex).res = $$
+	}
+	| {
+	}
+
+fileDecls : fileDecls fileDecl {
+		$$ = append($1, $2...)
+	}
+	| fileDecl
+
+fileDecl : import {
+		$$ = []*fileElement{{imp: $1}}
+	}
+	| package {
+		$$ = []*fileElement{{pkg: $1}}
+	}
+	| option {
+		$$ = []*fileElement{{option: $1[0]}}
+	}
+	| message {
+		$$ = []*fileElement{{message: $1}}
+	}
+	| enum {
+		$$ = []*fileElement{{enum: $1}}
+	}
+	| extend {
+		$$ = []*fileElement{{extend: $1}}
+	}
+	| service {
+		$$ = []*fileElement{{service: $1}}
+	}
+	| ';' {
+		$$ = []*fileElement{{empty: $1}}
+	}
+
+syntax : _SYNTAX '=' stringLit ';' {
+		if $3.val != "proto2" && $3.val != "proto3" {
+			lexError(protolex, $3.start(), "syntax value must be 'proto2' or 'proto3'")
+		}
+		$$ = &syntaxNode{syntax: $3}
+		$$.setRange($1, $4)
+	}
+
+import : _IMPORT stringLit ';' {
+		$$ = &importNode{ name: $2 }
+		$$.setRange($1, $3)
+	}
+	| _IMPORT _WEAK stringLit ';' {
+		$$ = &importNode{ name: $3, weak: true }
+		$$.setRange($1, $4)
+	}
+	| _IMPORT _PUBLIC stringLit ';' {
+		$$ = &importNode{ name: $3, public: true }
+		$$.setRange($1, $4)
+	}
+
+package : _PACKAGE ident ';' {
+		$$ = &packageNode{name: $2}
+		$$.setRange($1, $3)
+	}
+
+ident : name
+	| _FQNAME
+
+option : _OPTION optionName '=' constant ';' {
+		n := &optionNameNode{parts: $2}
+		n.setRange($2[0], $2[len($2)-1])
+		o := &optionNode{name: n, val: $4}
+		o.setRange($1, $5)
+		$$ = []*optionNode{o}
+	}
+
+optionName : ident {
+		$$ = toNameParts($1, 0)
+	}
+	| '(' typeIdent ')' {
+		p := &optionNamePartNode{text: $2, isExtension: true}
+		p.setRange($1, $3)
+		$$ = []*optionNamePartNode{p}
+	}
+	| '(' typeIdent ')' optionNameRest {
+		p := &optionNamePartNode{text: $2, isExtension: true}
+		p.setRange($1, $3)
+		ps := make([]*optionNamePartNode, 1, len($4)+1)
+		ps[0] = p
+		$$ = append(ps, $4...)
+	}
+
+optionNameRest : optionNameComponent
+	| optionNameComponent optionNameRest {
+		$$ = append($1, $2...)
+	}
+
+optionNameComponent : _TYPENAME {
+		$$ = toNameParts($1, 1 /* exclude leading dot */)
+	}
+	| '.' '(' typeIdent ')' {
+		p := &optionNamePartNode{text: $3, isExtension: true}
+		p.setRange($2, $4)
+		$$ = []*optionNamePartNode{p}
+	}
+
+constant : scalarConstant
+	| aggregate
+
+scalarConstant : stringLit {
+		$$ = $1
+	}
+	| intLit {
+		$$ = $1
+	}
+	| negIntLit {
+		$$ = $1
+	}
+	| floatLit {
+		$$ = $1
+	}
+	| name {
+		if $1.val == "true" {
+			$$ = &boolLiteralNode{basicNode: $1.basicNode, val: true}
+		} else if $1.val == "false" {
+			$$ = &boolLiteralNode{basicNode: $1.basicNode, val: false}
+		} else if $1.val == "inf" {
+			f := &floatLiteralNode{val: math.Inf(1)}
+			f.setRange($1, $1)
+			$$ = f
+		} else if $1.val == "nan" {
+			f := &floatLiteralNode{val: math.NaN()}
+			f.setRange($1, $1)
+			$$ = f
+		} else {
+			$$ = $1
+		}
+	}
+
+intLit : _INT_LIT
+	| '+' _INT_LIT {
+		$$ = $2
+	}
+
+negIntLit : '-' _INT_LIT {
+		if $2.val > math.MaxInt64 + 1 {
+			lexError(protolex, $2.start(), fmt.Sprintf("numeric constant %d would underflow (allowed range is %d to %d)", $2.val, int64(math.MinInt64), int64(math.MaxInt64)))
+		}
+		$$ = &negativeIntLiteralNode{val: -int64($2.val)}
+		$$.setRange($1, $2)
+	}
+
+floatLit : _FLOAT_LIT
+	| '-' _FLOAT_LIT {
+		$$ = &floatLiteralNode{val: -$2.val}
+		$$.setRange($1, $2)
+	}
+	| '+' _FLOAT_LIT {
+		$$ = &floatLiteralNode{val: $2.val}
+		$$.setRange($1, $2)
+	}
+	| '+' _INF {
+		$$ = &floatLiteralNode{val: math.Inf(1)}
+		$$.setRange($1, $2)
+	}
+	| '-' _INF {
+		$$ = &floatLiteralNode{val: math.Inf(-1)}
+		$$.setRange($1, $2)
+	}
+
+stringLit : _STRING_LIT
+    | stringLit _STRING_LIT {
+        $$ = &stringLiteralNode{val: $1.val + $2.val}
+        $$.setRange($1, $2)
+    }
+
+aggregate : '{' aggFields '}' {
+		a := &aggregateLiteralNode{elements: $2}
+		a.setRange($1, $3)
+		$$ = a
+	}
+
+aggFields : aggField
+	| aggFields aggField {
+		$$ = append($1, $2...)
+	}
+	| {
+		$$ = nil
+	}
+
+aggField : aggFieldEntry
+	| aggFieldEntry ',' {
+		$$ = $1
+	}
+	| aggFieldEntry ';' {
+		$$ = $1
+	}
+
+aggFieldEntry : aggName ':' scalarConstant {
+		a := &aggregateEntryNode{name: $1, val: $3}
+		a.setRange($1, $3)
+		$$ = []*aggregateEntryNode{a}
+	}
+	| aggName ':' '[' ']' {
+		s := &sliceLiteralNode{}
+		s.setRange($3, $4)
+		a := &aggregateEntryNode{name: $1, val: s}
+		a.setRange($1, $4)
+		$$ = []*aggregateEntryNode{a}
+	}
+	| aggName ':' '[' constantList ']' {
+		s := &sliceLiteralNode{elements: $4}
+		s.setRange($3, $5)
+		a := &aggregateEntryNode{name: $1, val: s}
+		a.setRange($1, $5)
+		$$ = []*aggregateEntryNode{a}
+	}
+	| aggName ':' aggregate {
+		a := &aggregateEntryNode{name: $1, val: $3}
+		a.setRange($1, $3)
+		$$ = []*aggregateEntryNode{a}
+	}
+	| aggName aggregate {
+		a := &aggregateEntryNode{name: $1, val: $2}
+		a.setRange($1, $2)
+		$$ = []*aggregateEntryNode{a}
+	}
+	| aggName ':' '<' aggFields '>' {
+		s := &aggregateLiteralNode{elements: $4}
+		s.setRange($3, $5)
+		a := &aggregateEntryNode{name: $1, val: s}
+		a.setRange($1, $5)
+		$$ = []*aggregateEntryNode{a}
+	}
+	| aggName '<' aggFields '>' {
+		s := &aggregateLiteralNode{elements: $3}
+		s.setRange($2, $4)
+		a := &aggregateEntryNode{name: $1, val: s}
+		a.setRange($1, $4)
+		$$ = []*aggregateEntryNode{a}
+	}
+
+aggName : name {
+		$$ = &aggregateNameNode{name: $1}
+		$$.setRange($1, $1)
+	}
+	| '[' ident ']' {
+		$$ = &aggregateNameNode{name: $2, isExtension: true}
+		$$.setRange($1, $3)
+	}
+
+constantList : constant {
+		$$ = []valueNode{$1}
+	}
+	| constantList ',' constant {
+		$$ = append($1, $3)
+	}
+	| constantList ';' constant {
+		$$ = append($1, $3)
+	}
+	| '<' aggFields '>' {
+		s := &aggregateLiteralNode{elements: $2}
+		s.setRange($1, $3)
+		$$ = []valueNode{s}
+	}
+	| constantList ','  '<' aggFields '>' {
+		s := &aggregateLiteralNode{elements: $4}
+		s.setRange($3, $5)
+		$$ = append($1, s)
+	}
+	| constantList ';'  '<' aggFields '>' {
+		s := &aggregateLiteralNode{elements: $4}
+		s.setRange($3, $5)
+		$$ = append($1, s)
+	}
+
+typeIdent : ident
+	| _TYPENAME
+
+field : _REQUIRED typeIdent name '=' _INT_LIT ';' {
+		checkTag(protolex, $5.start(), $5.val)
+		lbl := &labelNode{basicNode: $1.basicNode, required: true}
+		$$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5}
+		$$.setRange($1, $6)
+	}
+	| _OPTIONAL typeIdent name '=' _INT_LIT ';' {
+		checkTag(protolex, $5.start(), $5.val)
+		lbl := &labelNode{basicNode: $1.basicNode}
+		$$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5}
+		$$.setRange($1, $6)
+	}
+	| _REPEATED typeIdent name '=' _INT_LIT ';' {
+		checkTag(protolex, $5.start(), $5.val)
+		lbl := &labelNode{basicNode: $1.basicNode, repeated: true}
+		$$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5}
+		$$.setRange($1, $6)
+	}
+	| typeIdent name '=' _INT_LIT ';' {
+		checkTag(protolex, $4.start(), $4.val)
+		$$ = &fieldNode{fldType: $1, name: $2, tag: $4}
+		$$.setRange($1, $5)
+	}
+	| _REQUIRED typeIdent name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkTag(protolex, $5.start(), $5.val)
+		lbl := &labelNode{basicNode: $1.basicNode, required: true}
+		$$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5, options: $7}
+		$$.setRange($1, $9)
+	}
+	| _OPTIONAL typeIdent name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkTag(protolex, $5.start(), $5.val)
+		lbl := &labelNode{basicNode: $1.basicNode}
+		$$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5, options: $7}
+		$$.setRange($1, $9)
+	}
+	| _REPEATED typeIdent name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkTag(protolex, $5.start(), $5.val)
+		lbl := &labelNode{basicNode: $1.basicNode, repeated: true}
+		$$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5, options: $7}
+		$$.setRange($1, $9)
+	}
+	| typeIdent name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkTag(protolex, $4.start(), $4.val)
+		$$ = &fieldNode{fldType: $1, name: $2, tag: $4, options: $6}
+		$$.setRange($1, $8)
+	}
+
+fieldOptions : fieldOptions ',' fieldOption {
+		$$ = append($1, $3...)
+	}
+	| fieldOption
+
+fieldOption: optionName '=' constant {
+		n := &optionNameNode{parts: $1}
+		n.setRange($1[0], $1[len($1)-1])
+		o := &optionNode{name: n, val: $3}
+		o.setRange($1[0], $3)
+		$$ = []*optionNode{o}
+	}
+
+group : _REQUIRED _GROUP name '=' _INT_LIT '{' messageBody '}' {
+		checkTag(protolex, $5.start(), $5.val)
+		if !unicode.IsUpper(rune($3.val[0])) {
+			lexError(protolex, $3.start(), fmt.Sprintf("group %s should have a name that starts with a capital letter", $3.val))
+		}
+		lbl := &labelNode{basicNode: $1.basicNode, required: true}
+		$$ = &groupNode{groupKeyword: $2, label: lbl, name: $3, tag: $5, decls: $7}
+		$$.setRange($1, $8)
+	}
+	| _OPTIONAL _GROUP name '=' _INT_LIT '{' messageBody '}' {
+		checkTag(protolex, $5.start(), $5.val)
+		if !unicode.IsUpper(rune($3.val[0])) {
+			lexError(protolex, $3.start(), fmt.Sprintf("group %s should have a name that starts with a capital letter", $3.val))
+		}
+		lbl := &labelNode{basicNode: $1.basicNode}
+		$$ = &groupNode{groupKeyword: $2, label: lbl, name: $3, tag: $5, decls: $7}
+		$$.setRange($1, $8)
+	}
+	| _REPEATED _GROUP name '=' _INT_LIT '{' messageBody '}' {
+		checkTag(protolex, $5.start(), $5.val)
+		if !unicode.IsUpper(rune($3.val[0])) {
+			lexError(protolex, $3.start(), fmt.Sprintf("group %s should have a name that starts with a capital letter", $3.val))
+		}
+		lbl := &labelNode{basicNode: $1.basicNode, repeated: true}
+		$$ = &groupNode{groupKeyword: $2, label: lbl, name: $3, tag: $5, decls: $7}
+		$$.setRange($1, $8)
+	}
+
+oneof : _ONEOF name '{' oneofBody '}' {
+		c := 0
+		for _, el := range $4 {
+			if el.field != nil {
+				c++
+			}
+		}
+		if c == 0 {
+			lexError(protolex, $1.start(), "oneof must contain at least one field")
+		}
+		$$ = &oneOfNode{name: $2, decls: $4}
+		$$.setRange($1, $5)
+	}
+
+oneofBody : oneofBody oneofItem {
+		$$ = append($1, $2...)
+	}
+	| oneofItem
+	| {
+		$$ = nil
+	}
+
+oneofItem : option {
+		$$ = []*oneOfElement{{option: $1[0]}}
+	}
+	| oneofField {
+		$$ = []*oneOfElement{{field: $1}}
+	}
+	| ';' {
+		$$ = []*oneOfElement{{empty: $1}}
+	}
+
+oneofField : typeIdent name '=' _INT_LIT ';' {
+		checkTag(protolex, $4.start(), $4.val)
+		$$ = &fieldNode{fldType: $1, name: $2, tag: $4}
+		$$.setRange($1, $5)
+	}
+	| typeIdent name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkTag(protolex, $4.start(), $4.val)
+		$$ = &fieldNode{fldType: $1, name: $2, tag: $4, options: $6}
+		$$.setRange($1, $8)
+	}
+
+mapField : _MAP '<' keyType ',' typeIdent '>' name '=' _INT_LIT ';' {
+		checkTag(protolex, $9.start(), $9.val)
+		$$ = &mapFieldNode{mapKeyword: $1, keyType: $3, valueType: $5, name: $7, tag: $9}
+		$$.setRange($1, $10)
+	}
+	| _MAP '<' keyType ',' typeIdent '>' name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkTag(protolex, $9.start(), $9.val)
+		$$ = &mapFieldNode{mapKeyword: $1, keyType: $3, valueType: $5, name: $7, tag: $9, options: $11}
+		$$.setRange($1, $13)
+	}
+
+keyType : _INT32
+	| _INT64
+	| _UINT32
+	| _UINT64
+	| _SINT32
+	| _SINT64
+	| _FIXED32
+	| _FIXED64
+	| _SFIXED32
+	| _SFIXED64
+	| _BOOL
+	| _STRING
+
+extensions : _EXTENSIONS tagRanges ';' {
+		$$ = &extensionRangeNode{ranges: $2}
+		$$.setRange($1, $3)
+	}
+	| _EXTENSIONS tagRanges '[' fieldOptions ']' ';' {
+		$$ = &extensionRangeNode{ranges: $2, options: $4}
+		$$.setRange($1, $6)
+	}
+
+tagRanges : tagRanges ',' tagRange {
+		$$ = append($1, $3...)
+	}
+	| tagRange
+
+tagRange : _INT_LIT {
+		if $1.val > internal.MaxTag {
+			lexError(protolex, $1.start(), fmt.Sprintf("range includes out-of-range tag: %d (should be between 0 and %d)", $1.val, internal.MaxTag))
+		}
+		r := &rangeNode{stNode: $1, enNode: $1, st: int32($1.val), en: int32($1.val)}
+		r.setRange($1, $1)
+		$$ = []*rangeNode{r}
+	}
+	| _INT_LIT _TO _INT_LIT {
+		if $1.val > internal.MaxTag {
+			lexError(protolex, $1.start(), fmt.Sprintf("range start is out-of-range tag: %d (should be between 0 and %d)", $1.val, internal.MaxTag))
+		}
+		if $3.val > internal.MaxTag {
+			lexError(protolex, $3.start(), fmt.Sprintf("range end is out-of-range tag: %d (should be between 0 and %d)", $3.val, internal.MaxTag))
+		}
+		if $1.val > $3.val {
+			lexError(protolex, $1.start(), fmt.Sprintf("range, %d to %d, is invalid: start must be <= end", $1.val, $3.val))
+		}
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: int32($3.val)}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+	| _INT_LIT _TO _MAX {
+		if $1.val > internal.MaxTag {
+			lexError(protolex, $1.start(), fmt.Sprintf("range start is out-of-range tag: %d (should be between 0 and %d)", $1.val, internal.MaxTag))
+		}
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: internal.MaxTag}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+
+enumRanges : enumRanges ',' enumRange {
+		$$ = append($1, $3...)
+	}
+	| enumRange
+
+enumRange : _INT_LIT {
+		checkUint64InInt32Range(protolex, $1.start(), $1.val)
+		r := &rangeNode{stNode: $1, enNode: $1, st: int32($1.val), en: int32($1.val)}
+		r.setRange($1, $1)
+		$$ = []*rangeNode{r}
+	}
+	| negIntLit {
+		checkInt64InInt32Range(protolex, $1.start(), $1.val)
+		r := &rangeNode{stNode: $1, enNode: $1, st: int32($1.val), en: int32($1.val)}
+		r.setRange($1, $1)
+		$$ = []*rangeNode{r}
+	}
+	| _INT_LIT _TO _INT_LIT {
+		checkUint64InInt32Range(protolex, $1.start(), $1.val)
+		checkUint64InInt32Range(protolex, $3.start(), $3.val)
+		if $1.val > $3.val {
+			lexError(protolex, $1.start(), fmt.Sprintf("range, %d to %d, is invalid: start must be <= end", $1.val, $3.val))
+		}
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: int32($3.val)}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+	| negIntLit _TO negIntLit {
+		checkInt64InInt32Range(protolex, $1.start(), $1.val)
+		checkInt64InInt32Range(protolex, $3.start(), $3.val)
+		if $1.val > $3.val {
+			lexError(protolex, $1.start(), fmt.Sprintf("range, %d to %d, is invalid: start must be <= end", $1.val, $3.val))
+		}
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: int32($3.val)}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+	| negIntLit _TO _INT_LIT {
+		checkInt64InInt32Range(protolex, $1.start(), $1.val)
+		checkUint64InInt32Range(protolex, $3.start(), $3.val)
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: int32($3.val)}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+	| _INT_LIT _TO _MAX {
+		checkUint64InInt32Range(protolex, $1.start(), $1.val)
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: math.MaxInt32}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+	| negIntLit _TO _MAX {
+		checkInt64InInt32Range(protolex, $1.start(), $1.val)
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: math.MaxInt32}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+
+msgReserved : _RESERVED tagRanges ';' {
+		$$ = &reservedNode{ranges: $2}
+		$$.setRange($1, $3)
+	}
+	| reservedNames
+
+enumReserved : _RESERVED enumRanges ';' {
+		$$ = &reservedNode{ranges: $2}
+		$$.setRange($1, $3)
+	}
+	| reservedNames
+
+reservedNames : _RESERVED fieldNames ';' {
+		rsvd := map[string]struct{}{}
+		for _, n := range $2 {
+			if _, ok := rsvd[n.val]; ok {
+				lexError(protolex, n.start(), fmt.Sprintf("name %q is reserved multiple times", n.val))
+				break
+			}
+			rsvd[n.val] = struct{}{}
+		}
+		$$ = &reservedNode{names: $2}
+		$$.setRange($1, $3)
+	}
+
+fieldNames : fieldNames ',' stringLit {
+		$$ = append($1, $3)
+	}
+	| stringLit {
+		$$ = []*stringLiteralNode{$1}
+	}
+
+enum : _ENUM name '{' enumBody '}' {
+		c := 0
+		for _, el := range $4 {
+			if el.value != nil {
+				c++
+			}
+		}
+		if c == 0 {
+			lexError(protolex, $1.start(), "enums must define at least one value")
+		}
+		$$ = &enumNode{name: $2, decls: $4}
+		$$.setRange($1, $5)
+	}
+
+enumBody : enumBody enumItem {
+		$$ = append($1, $2...)
+	}
+	| enumItem
+	| {
+		$$ = nil
+	}
+
+enumItem : option {
+		$$ = []*enumElement{{option: $1[0]}}
+	}
+	| enumField {
+		$$ = []*enumElement{{value: $1}}
+	}
+	| enumReserved {
+		$$ = []*enumElement{{reserved: $1}}
+	}
+	| ';' {
+		$$ = []*enumElement{{empty: $1}}
+	}
+
+enumField : name '=' _INT_LIT ';' {
+		checkUint64InInt32Range(protolex, $3.start(), $3.val)
+		$$ = &enumValueNode{name: $1, numberP: $3}
+		$$.setRange($1, $4)
+	}
+	|  name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkUint64InInt32Range(protolex, $3.start(), $3.val)
+		$$ = &enumValueNode{name: $1, numberP: $3, options: $5}
+		$$.setRange($1, $7)
+	}
+	| name '=' negIntLit ';' {
+		checkInt64InInt32Range(protolex, $3.start(), $3.val)
+		$$ = &enumValueNode{name: $1, numberN: $3}
+		$$.setRange($1, $4)
+	}
+	|  name '=' negIntLit '[' fieldOptions ']' ';' {
+		checkInt64InInt32Range(protolex, $3.start(), $3.val)
+		$$ = &enumValueNode{name: $1, numberN: $3, options: $5}
+		$$.setRange($1, $7)
+	}
+
+message : _MESSAGE name '{' messageBody '}' {
+		$$ = &messageNode{name: $2, decls: $4}
+		$$.setRange($1, $5)
+	}
+
+messageBody : messageBody messageItem {
+		$$ = append($1, $2...)
+	}
+	| messageItem
+	| {
+		$$ = nil
+	}
+
+messageItem : field {
+		$$ = []*messageElement{{field: $1}}
+	}
+	| enum {
+		$$ = []*messageElement{{enum: $1}}
+	}
+	| message {
+		$$ = []*messageElement{{nested: $1}}
+	}
+	| extend {
+		$$ = []*messageElement{{extend: $1}}
+	}
+	| extensions {
+		$$ = []*messageElement{{extensionRange: $1}}
+	}
+	| group {
+		$$ = []*messageElement{{group: $1}}
+	}
+	| option {
+		$$ = []*messageElement{{option: $1[0]}}
+	}
+	| oneof {
+		$$ = []*messageElement{{oneOf: $1}}
+	}
+	| mapField {
+		$$ = []*messageElement{{mapField: $1}}
+	}
+	| msgReserved {
+		$$ = []*messageElement{{reserved: $1}}
+	}
+	| ';' {
+		$$ = []*messageElement{{empty: $1}}
+	}
+
+extend : _EXTEND typeIdent '{' extendBody '}' {
+		c := 0
+		for _, el := range $4 {
+			if el.field != nil || el.group != nil {
+				c++
+			}
+		}
+		if c == 0 {
+			lexError(protolex, $1.start(), "extend sections must define at least one extension")
+		}
+		$$ = &extendNode{extendee: $2, decls: $4}
+		$$.setRange($1, $5)
+	}
+
+extendBody : extendBody extendItem {
+		$$ = append($1, $2...)
+	}
+	| extendItem
+	| {
+		$$ = nil
+	}
+
+extendItem : field {
+		$$ = []*extendElement{{field: $1}}
+	}
+	| group {
+		$$ = []*extendElement{{group: $1}}
+	}
+	| ';' {
+		$$ = []*extendElement{{empty: $1}}
+	}
+
+service : _SERVICE name '{' serviceBody '}' {
+		$$ = &serviceNode{name: $2, decls: $4}
+		$$.setRange($1, $5)
+	}
+
+serviceBody : serviceBody serviceItem {
+		$$ = append($1, $2...)
+	}
+	| serviceItem
+	| {
+		$$ = nil
+	}
+
+// NB: doc suggests support for "stream" declaration, separate from "rpc", but
+// it does not appear to be supported in protoc (doc is likely from grammar for
+// Google-internal version of protoc, with support for streaming stubby)
+serviceItem : option {
+		$$ = []*serviceElement{{option: $1[0]}}
+	}
+	| rpc {
+		$$ = []*serviceElement{{rpc: $1}}
+	}
+	| ';' {
+		$$ = []*serviceElement{{empty: $1}}
+	}
+
+rpc : _RPC name '(' rpcType ')' _RETURNS '(' rpcType ')' ';' {
+		$$ = &methodNode{name: $2, input: $4, output: $8}
+		$$.setRange($1, $10)
+	}
+	| _RPC name '(' rpcType ')' _RETURNS '(' rpcType ')' '{' rpcOptions '}' {
+		$$ = &methodNode{name: $2, input: $4, output: $8, options: $11}
+		$$.setRange($1, $12)
+	}
+
+rpcType : _STREAM typeIdent {
+		$$ = &rpcTypeNode{msgType: $2, streamKeyword: $1}
+		$$.setRange($1, $2)
+	}
+	| typeIdent {
+		$$ = &rpcTypeNode{msgType: $1}
+		$$.setRange($1, $1)
+	}
+
+rpcOptions : rpcOptions rpcOption {
+		$$ = append($1, $2...)
+	}
+	| rpcOption
+	| {
+		$$ = []*optionNode{}
+	}
+
+rpcOption : option {
+		$$ = $1
+	}
+	| ';' {
+		$$ = []*optionNode{}
+	}
+
+name : _NAME
+	| _SYNTAX
+	| _IMPORT
+	| _WEAK
+	| _PUBLIC
+	| _PACKAGE
+	| _OPTION
+	| _TRUE
+	| _FALSE
+	| _INF
+	| _NAN
+	| _REPEATED
+	| _OPTIONAL
+	| _REQUIRED
+	| _DOUBLE
+	| _FLOAT
+	| _INT32
+	| _INT64
+	| _UINT32
+	| _UINT64
+	| _SINT32
+	| _SINT64
+	| _FIXED32
+	| _FIXED64
+	| _SFIXED32
+	| _SFIXED64
+	| _BOOL
+	| _STRING
+	| _BYTES
+	| _GROUP
+	| _ONEOF
+	| _MAP
+	| _EXTENSIONS
+	| _TO
+	| _MAX
+	| _RESERVED
+	| _ENUM
+	| _MESSAGE
+	| _EXTEND
+	| _SERVICE
+	| _RPC
+	| _STREAM
+	| _RETURNS
+
+%%
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y.go
new file mode 100644
index 0000000..6b8a4e6
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y.go
@@ -0,0 +1,2093 @@
+// Code generated by goyacc -o proto.y.go -p proto proto.y. DO NOT EDIT.
+
+//line proto.y:2
+package protoparse
+
+import __yyfmt__ "fmt"
+
+//line proto.y:2
+
+//lint:file-ignore SA4006 generated parser has unused values
+
+import (
+	"fmt"
+	"math"
+	"unicode"
+
+	"github.com/jhump/protoreflect/desc/internal"
+)
+
+//line proto.y:18
+type protoSymType struct {
+	yys       int
+	file      *fileNode
+	fileDecls []*fileElement
+	syn       *syntaxNode
+	pkg       *packageNode
+	imprt     *importNode
+	msg       *messageNode
+	msgDecls  []*messageElement
+	fld       *fieldNode
+	mapFld    *mapFieldNode
+	grp       *groupNode
+	oo        *oneOfNode
+	ooDecls   []*oneOfElement
+	ext       *extensionRangeNode
+	resvd     *reservedNode
+	en        *enumNode
+	enDecls   []*enumElement
+	env       *enumValueNode
+	extend    *extendNode
+	extDecls  []*extendElement
+	svc       *serviceNode
+	svcDecls  []*serviceElement
+	mtd       *methodNode
+	rpcType   *rpcTypeNode
+	opts      []*optionNode
+	optNm     []*optionNamePartNode
+	rngs      []*rangeNode
+	names     []*stringLiteralNode
+	sl        []valueNode
+	agg       []*aggregateEntryNode
+	aggName   *aggregateNameNode
+	v         valueNode
+	str       *stringLiteralNode
+	i         *negativeIntLiteralNode
+	ui        *intLiteralNode
+	f         *floatLiteralNode
+	id        *identNode
+	b         *basicNode
+	err       error
+}
+
+const _STRING_LIT = 57346
+const _INT_LIT = 57347
+const _FLOAT_LIT = 57348
+const _NAME = 57349
+const _FQNAME = 57350
+const _TYPENAME = 57351
+const _SYNTAX = 57352
+const _IMPORT = 57353
+const _WEAK = 57354
+const _PUBLIC = 57355
+const _PACKAGE = 57356
+const _OPTION = 57357
+const _TRUE = 57358
+const _FALSE = 57359
+const _INF = 57360
+const _NAN = 57361
+const _REPEATED = 57362
+const _OPTIONAL = 57363
+const _REQUIRED = 57364
+const _DOUBLE = 57365
+const _FLOAT = 57366
+const _INT32 = 57367
+const _INT64 = 57368
+const _UINT32 = 57369
+const _UINT64 = 57370
+const _SINT32 = 57371
+const _SINT64 = 57372
+const _FIXED32 = 57373
+const _FIXED64 = 57374
+const _SFIXED32 = 57375
+const _SFIXED64 = 57376
+const _BOOL = 57377
+const _STRING = 57378
+const _BYTES = 57379
+const _GROUP = 57380
+const _ONEOF = 57381
+const _MAP = 57382
+const _EXTENSIONS = 57383
+const _TO = 57384
+const _MAX = 57385
+const _RESERVED = 57386
+const _ENUM = 57387
+const _MESSAGE = 57388
+const _EXTEND = 57389
+const _SERVICE = 57390
+const _RPC = 57391
+const _STREAM = 57392
+const _RETURNS = 57393
+const _ERROR = 57394
+
+var protoToknames = [...]string{
+	"$end",
+	"error",
+	"$unk",
+	"_STRING_LIT",
+	"_INT_LIT",
+	"_FLOAT_LIT",
+	"_NAME",
+	"_FQNAME",
+	"_TYPENAME",
+	"_SYNTAX",
+	"_IMPORT",
+	"_WEAK",
+	"_PUBLIC",
+	"_PACKAGE",
+	"_OPTION",
+	"_TRUE",
+	"_FALSE",
+	"_INF",
+	"_NAN",
+	"_REPEATED",
+	"_OPTIONAL",
+	"_REQUIRED",
+	"_DOUBLE",
+	"_FLOAT",
+	"_INT32",
+	"_INT64",
+	"_UINT32",
+	"_UINT64",
+	"_SINT32",
+	"_SINT64",
+	"_FIXED32",
+	"_FIXED64",
+	"_SFIXED32",
+	"_SFIXED64",
+	"_BOOL",
+	"_STRING",
+	"_BYTES",
+	"_GROUP",
+	"_ONEOF",
+	"_MAP",
+	"_EXTENSIONS",
+	"_TO",
+	"_MAX",
+	"_RESERVED",
+	"_ENUM",
+	"_MESSAGE",
+	"_EXTEND",
+	"_SERVICE",
+	"_RPC",
+	"_STREAM",
+	"_RETURNS",
+	"_ERROR",
+	"'='",
+	"';'",
+	"':'",
+	"'{'",
+	"'}'",
+	"'\\\\'",
+	"'/'",
+	"'?'",
+	"'.'",
+	"','",
+	"'>'",
+	"'<'",
+	"'+'",
+	"'-'",
+	"'('",
+	"')'",
+	"'['",
+	"']'",
+	"'*'",
+	"'&'",
+	"'^'",
+	"'%'",
+	"'$'",
+	"'#'",
+	"'@'",
+	"'!'",
+	"'~'",
+	"'`'",
+}
+var protoStatenames = [...]string{}
+
+const protoEofCode = 1
+const protoErrCode = 2
+const protoInitialStackSize = 16
+
+//line proto.y:937
+
+//line yacctab:1
+var protoExca = [...]int{
+	-1, 1,
+	1, -1,
+	-2, 0,
+}
+
+const protoPrivate = 57344
+
+const protoLast = 2050
+
+var protoAct = [...]int{
+
+	120, 8, 288, 8, 8, 386, 264, 80, 128, 113,
+	159, 160, 265, 271, 103, 196, 185, 112, 100, 101,
+	29, 171, 8, 28, 75, 119, 99, 114, 79, 153,
+	137, 148, 266, 184, 24, 139, 306, 255, 77, 78,
+	319, 82, 306, 83, 389, 86, 87, 306, 318, 74,
+	378, 306, 98, 306, 306, 363, 317, 306, 306, 361,
+	306, 359, 351, 222, 379, 338, 337, 366, 307, 328,
+	377, 224, 325, 322, 304, 280, 278, 286, 223, 380,
+	315, 356, 367, 197, 329, 90, 243, 326, 323, 305,
+	281, 279, 297, 140, 111, 154, 27, 197, 249, 214,
+	209, 106, 188, 336, 246, 276, 241, 330, 240, 211,
+	105, 173, 245, 144, 242, 287, 224, 208, 381, 150,
+	382, 149, 176, 146, 327, 207, 324, 163, 16, 226,
+	94, 93, 92, 91, 177, 179, 181, 16, 199, 140,
+	79, 75, 85, 392, 199, 383, 368, 199, 374, 183,
+	78, 77, 373, 154, 16, 187, 191, 372, 199, 144,
+	198, 365, 157, 174, 85, 191, 74, 156, 355, 146,
+	189, 206, 212, 150, 193, 149, 388, 354, 204, 201,
+	163, 210, 203, 14, 333, 158, 15, 16, 157, 85,
+	85, 88, 97, 156, 213, 16, 202, 335, 215, 216,
+	217, 218, 219, 220, 308, 262, 261, 4, 14, 244,
+	260, 15, 16, 376, 96, 259, 258, 18, 17, 19,
+	20, 257, 254, 256, 221, 339, 13, 272, 252, 194,
+	105, 75, 163, 248, 388, 275, 250, 390, 283, 95,
+	84, 267, 18, 17, 19, 20, 89, 23, 247, 225,
+	352, 13, 268, 303, 168, 169, 27, 186, 290, 302,
+	198, 282, 277, 285, 295, 301, 206, 170, 300, 5,
+	299, 272, 105, 22, 163, 163, 284, 117, 11, 275,
+	11, 11, 165, 166, 310, 312, 313, 75, 314, 75,
+	269, 22, 27, 155, 298, 167, 311, 186, 316, 11,
+	25, 26, 263, 168, 293, 320, 85, 206, 27, 152,
+	12, 147, 331, 75, 75, 163, 163, 3, 141, 332,
+	21, 115, 10, 138, 10, 10, 118, 195, 142, 105,
+	345, 75, 206, 347, 75, 123, 349, 75, 190, 105,
+	105, 163, 346, 10, 270, 348, 116, 9, 350, 9,
+	9, 122, 357, 121, 358, 273, 176, 353, 176, 369,
+	176, 334, 163, 161, 163, 290, 292, 104, 9, 206,
+	206, 340, 342, 102, 375, 75, 162, 227, 163, 163,
+	172, 385, 7, 387, 6, 2, 387, 384, 75, 1,
+	0, 391, 27, 107, 110, 31, 0, 0, 32, 33,
+	34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+	64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+	0, 0, 0, 0, 106, 0, 0, 0, 0, 0,
+	0, 0, 294, 108, 109, 0, 0, 0, 291, 27,
+	107, 110, 31, 0, 0, 32, 33, 34, 35, 36,
+	37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
+	47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+	57, 58, 59, 60, 61, 62, 63, 64, 65, 66,
+	67, 68, 69, 70, 71, 72, 73, 0, 0, 0,
+	0, 106, 0, 0, 0, 0, 0, 0, 0, 253,
+	108, 109, 0, 0, 251, 27, 107, 110, 31, 0,
+	0, 32, 33, 34, 35, 36, 37, 38, 39, 40,
+	41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
+	51, 52, 53, 54, 55, 56, 57, 58, 59, 60,
+	61, 62, 63, 64, 65, 66, 67, 68, 69, 70,
+	71, 72, 73, 0, 0, 0, 0, 106, 0, 0,
+	0, 0, 0, 0, 0, 343, 108, 109, 27, 107,
+	110, 31, 0, 0, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	106, 0, 0, 0, 0, 0, 0, 0, 341, 108,
+	109, 27, 107, 110, 31, 0, 0, 32, 33, 34,
+	35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+	45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+	55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+	65, 66, 67, 68, 69, 70, 71, 72, 73, 0,
+	0, 0, 0, 106, 0, 0, 0, 0, 0, 0,
+	0, 31, 108, 109, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 371, 0, 0,
+	0, 31, 0, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 370, 0, 0,
+	0, 31, 0, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 344, 0, 0,
+	0, 31, 0, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 321, 0, 0,
+	0, 31, 0, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 296, 0, 0,
+	0, 31, 0, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 205, 0, 0, 0, 0, 0, 0, 0, 0,
+	0, 31, 0, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 228, 229, 230, 231,
+	232, 233, 234, 235, 236, 237, 238, 239, 0, 0,
+	0, 31, 30, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+	0, 76, 31, 30, 81, 32, 33, 34, 35, 36,
+	133, 38, 39, 40, 41, 127, 126, 125, 45, 46,
+	47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+	57, 58, 59, 60, 134, 135, 132, 64, 65, 136,
+	129, 130, 131, 70, 71, 72, 73, 0, 0, 124,
+	0, 0, 364, 31, 30, 81, 32, 33, 34, 35,
+	36, 133, 38, 39, 40, 41, 127, 126, 125, 45,
+	46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
+	56, 57, 58, 59, 60, 134, 135, 132, 64, 65,
+	136, 129, 130, 131, 70, 71, 72, 73, 0, 0,
+	124, 0, 0, 362, 31, 30, 81, 32, 33, 34,
+	35, 36, 133, 38, 39, 40, 41, 127, 126, 125,
+	45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+	55, 56, 57, 58, 59, 60, 134, 135, 132, 64,
+	65, 136, 129, 130, 131, 70, 71, 72, 73, 0,
+	0, 124, 0, 0, 360, 31, 30, 81, 32, 33,
+	34, 35, 36, 133, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+	64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+	0, 0, 274, 0, 0, 309, 31, 30, 81, 32,
+	33, 34, 35, 36, 37, 38, 39, 40, 41, 127,
+	126, 125, 45, 46, 47, 48, 49, 50, 51, 52,
+	53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
+	63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
+	73, 0, 0, 151, 0, 0, 200, 31, 30, 81,
+	32, 33, 34, 35, 36, 133, 38, 39, 40, 41,
+	127, 126, 125, 45, 46, 47, 48, 49, 50, 51,
+	52, 53, 54, 55, 56, 57, 58, 59, 60, 134,
+	135, 132, 64, 65, 136, 129, 130, 131, 70, 71,
+	72, 73, 0, 0, 124, 31, 0, 175, 32, 33,
+	34, 35, 36, 133, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+	64, 65, 145, 67, 68, 69, 70, 71, 72, 73,
+	0, 0, 143, 0, 0, 192, 31, 30, 81, 32,
+	33, 34, 35, 36, 133, 38, 39, 40, 41, 127,
+	126, 125, 45, 46, 47, 48, 49, 50, 51, 52,
+	53, 54, 55, 56, 57, 58, 59, 60, 134, 135,
+	132, 64, 65, 136, 129, 130, 131, 70, 71, 72,
+	73, 0, 0, 124, 31, 30, 81, 32, 33, 34,
+	35, 36, 133, 38, 39, 40, 41, 42, 43, 44,
+	45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+	55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+	65, 66, 67, 68, 69, 70, 71, 72, 73, 0,
+	0, 274, 31, 30, 81, 32, 33, 34, 35, 36,
+	37, 38, 39, 40, 41, 127, 126, 125, 45, 46,
+	47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+	57, 58, 59, 60, 61, 62, 63, 64, 65, 66,
+	67, 68, 69, 70, 71, 72, 73, 31, 0, 151,
+	32, 33, 34, 35, 36, 133, 38, 39, 40, 41,
+	42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+	52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
+	62, 63, 64, 65, 145, 67, 68, 69, 70, 71,
+	72, 73, 0, 0, 143, 31, 30, 81, 32, 33,
+	34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+	64, 65, 66, 67, 68, 69, 70, 71, 289, 73,
+	31, 30, 81, 32, 33, 34, 35, 36, 37, 38,
+	39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+	49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
+	59, 60, 61, 62, 63, 64, 65, 66, 67, 68,
+	69, 70, 71, 72, 73, 31, 30, 81, 32, 33,
+	34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 182, 61, 62, 63,
+	64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+	31, 30, 81, 32, 33, 34, 35, 36, 37, 38,
+	39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+	49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
+	59, 180, 61, 62, 63, 64, 65, 66, 67, 68,
+	69, 70, 71, 72, 73, 31, 30, 81, 32, 33,
+	34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 178, 61, 62, 63,
+	64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+	31, 30, 0, 32, 33, 34, 35, 36, 37, 38,
+	39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+	49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
+	59, 60, 61, 62, 63, 64, 65, 66, 67, 68,
+	69, 70, 71, 72, 73, 31, 0, 0, 32, 33,
+	34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+	64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+}
+var protoPact = [...]int{
+
+	197, -1000, 172, 172, 194, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, 288, 1953, 1124, 1998, 1998, 1773,
+	1998, 172, -1000, 304, 186, 304, 304, -1000, 137, -1000,
+	-1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, 193, -1000, 1773, 77, 76, 75,
+	-1000, -1000, 74, 185, -1000, -1000, 160, 138, -1000, 647,
+	26, 1539, 1680, 1635, 113, -1000, -1000, -1000, 131, -1000,
+	-1000, 302, -1000, -1000, -1000, -1000, 1064, -1000, 277, 249,
+	-1000, 102, 1440, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, -1000, 1908, 1863, 1818, 1998, 1998,
+	1998, 1773, 292, 1124, 1998, 38, 252, -1000, 1488, -1000,
+	-1000, -1000, -1000, -1000, 176, 92, -1000, 1389, -1000, -1000,
+	-1000, -1000, 139, -1000, -1000, -1000, -1000, 1998, -1000, 1004,
+	-1000, 63, 45, -1000, 1953, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, 102, -1000, 32, -1000, -1000, 1998, 1998, 1998,
+	1998, 1998, 1998, 171, 9, -1000, 207, 73, 1091, 54,
+	52, 302, -1000, -1000, 81, 50, -1000, 206, 191, 298,
+	-1000, -1000, -1000, -1000, 31, -1000, -1000, -1000, -1000, 455,
+	-1000, 1064, -33, -1000, 1773, 168, 163, 162, 157, 153,
+	152, 297, -1000, 1124, 292, 247, 1587, 43, -1000, -1000,
+	-1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, 304, 22, 21, 78, -1000, 233, 72, 1728,
+	-1000, 388, -1000, 1064, 944, -1000, 24, 289, 265, 263,
+	260, 254, 248, 20, -2, -1000, 151, -1000, -1000, -1000,
+	1338, -1000, -1000, -1000, -1000, 1998, 1773, 302, -1000, 1124,
+	-1000, 1124, -1000, -1000, -1000, -1000, -1000, -1000, 12, 1773,
+	-1000, -1000, -14, -1000, 1064, 884, -1000, -1000, 19, 70,
+	18, 68, 15, 51, -1000, 1124, 1124, 130, 647, -1000,
+	-1000, 144, 40, -4, -5, 174, -1000, -1000, 584, 521,
+	824, -1000, -1000, 1124, 1539, -1000, 1124, 1539, -1000, 1124,
+	1539, -8, -1000, -1000, -1000, 245, 1998, 123, 114, 14,
+	-1000, 1064, -1000, 1064, -1000, -9, 1287, -11, 1236, -15,
+	1185, 107, 13, 93, -1000, -1000, 1728, 764, 704, 103,
+	-1000, 98, -1000, 94, -1000, -1000, -1000, 1124, 208, 2,
+	-1000, -1000, -1000, -1000, -1000, -20, 10, 64, 91, -1000,
+	1124, -1000, 122, -1000, -26, 180, -1000, -1000, -1000, 89,
+	-1000, -1000, -1000,
+}
+var protoPgo = [...]int{
+
+	0, 389, 385, 269, 317, 384, 382, 0, 12, 6,
+	5, 381, 32, 21, 380, 52, 26, 18, 20, 7,
+	8, 377, 376, 14, 373, 367, 366, 10, 11, 363,
+	27, 355, 353, 25, 351, 346, 9, 17, 13, 344,
+	338, 335, 328, 30, 16, 33, 15, 327, 326, 321,
+	35, 323, 318, 277, 31, 311, 19, 310, 29, 309,
+	293, 2,
+}
+var protoR1 = [...]int{
+
+	0, 1, 1, 1, 1, 4, 4, 3, 3, 3,
+	3, 3, 3, 3, 3, 2, 5, 5, 5, 6,
+	19, 19, 7, 12, 12, 12, 13, 13, 14, 14,
+	15, 15, 16, 16, 16, 16, 16, 24, 24, 23,
+	25, 25, 25, 25, 25, 56, 56, 17, 27, 27,
+	27, 28, 28, 28, 29, 29, 29, 29, 29, 29,
+	29, 22, 22, 26, 26, 26, 26, 26, 26, 20,
+	20, 30, 30, 30, 30, 30, 30, 30, 30, 9,
+	9, 8, 33, 33, 33, 32, 39, 39, 39, 38,
+	38, 38, 31, 31, 34, 34, 21, 21, 21, 21,
+	21, 21, 21, 21, 21, 21, 21, 21, 48, 48,
+	45, 45, 44, 44, 44, 47, 47, 46, 46, 46,
+	46, 46, 46, 46, 41, 41, 42, 42, 43, 40,
+	40, 49, 51, 51, 51, 50, 50, 50, 50, 52,
+	52, 52, 52, 35, 37, 37, 37, 36, 36, 36,
+	36, 36, 36, 36, 36, 36, 36, 36, 53, 55,
+	55, 55, 54, 54, 54, 57, 59, 59, 59, 58,
+	58, 58, 60, 60, 61, 61, 11, 11, 11, 10,
+	10, 18, 18, 18, 18, 18, 18, 18, 18, 18,
+	18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
+	18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
+	18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
+	18, 18, 18, 18,
+}
+var protoR2 = [...]int{
+
+	0, 1, 1, 2, 0, 2, 1, 1, 1, 1,
+	1, 1, 1, 1, 1, 4, 3, 4, 4, 3,
+	1, 1, 5, 1, 3, 4, 1, 2, 1, 4,
+	1, 1, 1, 1, 1, 1, 1, 1, 2, 2,
+	1, 2, 2, 2, 2, 1, 2, 3, 1, 2,
+	0, 1, 2, 2, 3, 4, 5, 3, 2, 5,
+	4, 1, 3, 1, 3, 3, 3, 5, 5, 1,
+	1, 6, 6, 6, 5, 9, 9, 9, 8, 3,
+	1, 3, 8, 8, 8, 5, 2, 1, 0, 1,
+	1, 1, 5, 8, 10, 13, 1, 1, 1, 1,
+	1, 1, 1, 1, 1, 1, 1, 1, 3, 6,
+	3, 1, 1, 3, 3, 3, 1, 1, 1, 3,
+	3, 3, 3, 3, 3, 1, 3, 1, 3, 3,
+	1, 5, 2, 1, 0, 1, 1, 1, 1, 4,
+	7, 4, 7, 5, 2, 1, 0, 1, 1, 1,
+	1, 1, 1, 1, 1, 1, 1, 1, 5, 2,
+	1, 0, 1, 1, 1, 5, 2, 1, 0, 1,
+	1, 1, 10, 12, 2, 1, 2, 1, 0, 1,
+	1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+	1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+	1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+	1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+	1, 1, 1, 1,
+}
+var protoChk = [...]int{
+
+	-1000, -1, -2, -4, 10, -3, -5, -6, -7, -35,
+	-49, -53, -57, 54, 11, 14, 15, 46, 45, 47,
+	48, -4, -3, 53, -56, 12, 13, 4, -19, -18,
+	8, 7, 10, 11, 12, 13, 14, 15, 16, 17,
+	18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+	28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, -12, -19, 67, -18, -18, -20,
+	-19, 9, -18, -56, 54, 4, -56, -56, 54, 53,
+	-20, 56, 56, 56, 56, 54, 54, 54, -15, -16,
+	-17, -56, -24, -23, -25, -18, 56, 5, 65, 66,
+	6, 68, -37, -36, -30, -49, -35, -53, -48, -33,
+	-7, -32, -34, -41, 54, 22, 21, 20, -20, 45,
+	46, 47, 41, 15, 39, 40, 44, -43, -51, -50,
+	-7, -52, -42, 54, -18, 44, -43, -55, -54, -30,
+	-33, 54, -59, -58, -7, -60, 54, 49, 54, -27,
+	-28, -29, -22, -18, 69, 5, 6, 18, 5, 6,
+	18, -13, -14, 9, 61, 57, -36, -20, 38, -20,
+	38, -20, 38, -18, -45, -44, 5, -18, 64, -45,
+	-40, -56, 57, -50, 53, -47, -46, 5, -23, 66,
+	57, -54, 57, -58, -18, 57, -28, 62, 54, 55,
+	-17, 64, -19, -13, 67, -18, -18, -18, -18, -18,
+	-18, 53, 54, 69, 62, 42, 56, -21, 25, 26,
+	27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
+	54, 54, 62, 5, -23, 62, 54, 42, 42, 67,
+	-16, 69, -17, 64, -27, 70, -20, 53, 53, 53,
+	53, 53, 53, 5, -9, -8, -12, -44, 5, 43,
+	-39, -38, -7, -31, 54, -20, 62, -56, 54, 69,
+	54, 69, -46, 5, 43, -23, 5, 43, -61, 50,
+	-20, 70, -26, -15, 64, -27, 63, 68, 5, 5,
+	5, 5, 5, 5, 54, 69, 62, 70, 53, 57,
+	-38, -18, -20, -9, -9, 68, -20, 70, 62, 54,
+	-27, 63, 54, 69, 56, 54, 69, 56, 54, 69,
+	56, -9, -8, 54, -15, 53, 63, 70, 70, 51,
+	-15, 64, -15, 64, 63, -9, -37, -9, -37, -9,
+	-37, 70, 5, -18, 54, 54, 67, -27, -27, 70,
+	57, 70, 57, 70, 57, 54, 54, 69, 53, -61,
+	63, 63, 54, 54, 54, -9, 5, 68, 70, 54,
+	69, 54, 56, 54, -9, -11, -10, -7, 54, 70,
+	57, -10, 54,
+}
+var protoDef = [...]int{
+
+	4, -2, 1, 2, 0, 6, 7, 8, 9, 10,
+	11, 12, 13, 14, 0, 0, 0, 0, 0, 0,
+	0, 3, 5, 0, 0, 0, 0, 45, 0, 20,
+	21, 181, 182, 183, 184, 185, 186, 187, 188, 189,
+	190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
+	200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
+	210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
+	220, 221, 222, 223, 0, 23, 0, 0, 0, 0,
+	69, 70, 0, 0, 16, 46, 0, 0, 19, 0,
+	0, 146, 134, 161, 168, 15, 17, 18, 0, 30,
+	31, 32, 33, 34, 35, 36, 50, 37, 0, 0,
+	40, 24, 0, 145, 147, 148, 149, 150, 151, 152,
+	153, 154, 155, 156, 157, 0, 0, 0, 0, 0,
+	0, 0, 213, 187, 0, 212, 216, 125, 0, 133,
+	135, 136, 137, 138, 0, 216, 127, 0, 160, 162,
+	163, 164, 0, 167, 169, 170, 171, 0, 22, 0,
+	48, 51, 0, 61, 0, 38, 42, 43, 39, 41,
+	44, 25, 26, 28, 0, 143, 144, 0, 0, 0,
+	0, 0, 0, 0, 0, 111, 112, 0, 0, 0,
+	0, 130, 131, 132, 0, 0, 116, 117, 118, 0,
+	158, 159, 165, 166, 0, 47, 49, 52, 53, 0,
+	58, 50, 0, 27, 0, 0, 0, 0, 0, 0,
+	0, 0, 108, 0, 0, 0, 88, 0, 96, 97,
+	98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
+	124, 128, 0, 0, 0, 0, 126, 0, 0, 0,
+	54, 0, 57, 50, 0, 62, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 80, 0, 110, 113, 114,
+	0, 87, 89, 90, 91, 0, 0, 129, 139, 0,
+	141, 0, 115, 119, 122, 120, 121, 123, 0, 222,
+	175, 55, 0, 63, 50, 0, 60, 29, 0, 0,
+	0, 0, 0, 0, 74, 0, 0, 0, 0, 85,
+	86, 0, 0, 0, 0, 0, 174, 56, 0, 0,
+	0, 59, 71, 0, 146, 72, 0, 146, 73, 0,
+	146, 0, 79, 109, 81, 0, 0, 0, 0, 0,
+	64, 50, 65, 50, 66, 0, 0, 0, 0, 0,
+	0, 0, 0, 0, 140, 142, 0, 0, 0, 0,
+	82, 0, 83, 0, 84, 78, 92, 0, 0, 0,
+	67, 68, 75, 76, 77, 0, 0, 0, 0, 94,
+	0, 172, 178, 93, 0, 0, 177, 179, 180, 0,
+	173, 176, 95,
+}
+var protoTok1 = [...]int{
+
+	1, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 78, 3, 76, 75, 74, 72, 3,
+	67, 68, 71, 65, 62, 66, 61, 59, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 55, 54,
+	64, 53, 63, 60, 77, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 69, 58, 70, 73, 3, 80, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 56, 3, 57, 79,
+}
+var protoTok2 = [...]int{
+
+	2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
+	12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
+	22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+	32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
+	42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+	52,
+}
+var protoTok3 = [...]int{
+	0,
+}
+
+var protoErrorMessages = [...]struct {
+	state int
+	token int
+	msg   string
+}{}
+
+//line yaccpar:1
+
+/*	parser for yacc output	*/
+
+var (
+	protoDebug        = 0
+	protoErrorVerbose = false
+)
+
+type protoLexer interface {
+	Lex(lval *protoSymType) int
+	Error(s string)
+}
+
+type protoParser interface {
+	Parse(protoLexer) int
+	Lookahead() int
+}
+
+type protoParserImpl struct {
+	lval  protoSymType
+	stack [protoInitialStackSize]protoSymType
+	char  int
+}
+
+func (p *protoParserImpl) Lookahead() int {
+	return p.char
+}
+
+func protoNewParser() protoParser {
+	return &protoParserImpl{}
+}
+
+const protoFlag = -1000
+
+func protoTokname(c int) string {
+	if c >= 1 && c-1 < len(protoToknames) {
+		if protoToknames[c-1] != "" {
+			return protoToknames[c-1]
+		}
+	}
+	return __yyfmt__.Sprintf("tok-%v", c)
+}
+
+func protoStatname(s int) string {
+	if s >= 0 && s < len(protoStatenames) {
+		if protoStatenames[s] != "" {
+			return protoStatenames[s]
+		}
+	}
+	return __yyfmt__.Sprintf("state-%v", s)
+}
+
+func protoErrorMessage(state, lookAhead int) string {
+	const TOKSTART = 4
+
+	if !protoErrorVerbose {
+		return "syntax error"
+	}
+
+	for _, e := range protoErrorMessages {
+		if e.state == state && e.token == lookAhead {
+			return "syntax error: " + e.msg
+		}
+	}
+
+	res := "syntax error: unexpected " + protoTokname(lookAhead)
+
+	// To match Bison, suggest at most four expected tokens.
+	expected := make([]int, 0, 4)
+
+	// Look for shiftable tokens.
+	base := protoPact[state]
+	for tok := TOKSTART; tok-1 < len(protoToknames); tok++ {
+		if n := base + tok; n >= 0 && n < protoLast && protoChk[protoAct[n]] == tok {
+			if len(expected) == cap(expected) {
+				return res
+			}
+			expected = append(expected, tok)
+		}
+	}
+
+	if protoDef[state] == -2 {
+		i := 0
+		for protoExca[i] != -1 || protoExca[i+1] != state {
+			i += 2
+		}
+
+		// Look for tokens that we accept or reduce.
+		for i += 2; protoExca[i] >= 0; i += 2 {
+			tok := protoExca[i]
+			if tok < TOKSTART || protoExca[i+1] == 0 {
+				continue
+			}
+			if len(expected) == cap(expected) {
+				return res
+			}
+			expected = append(expected, tok)
+		}
+
+		// If the default action is to accept or reduce, give up.
+		if protoExca[i+1] != 0 {
+			return res
+		}
+	}
+
+	for i, tok := range expected {
+		if i == 0 {
+			res += ", expecting "
+		} else {
+			res += " or "
+		}
+		res += protoTokname(tok)
+	}
+	return res
+}
+
+func protolex1(lex protoLexer, lval *protoSymType) (char, token int) {
+	token = 0
+	char = lex.Lex(lval)
+	if char <= 0 {
+		token = protoTok1[0]
+		goto out
+	}
+	if char < len(protoTok1) {
+		token = protoTok1[char]
+		goto out
+	}
+	if char >= protoPrivate {
+		if char < protoPrivate+len(protoTok2) {
+			token = protoTok2[char-protoPrivate]
+			goto out
+		}
+	}
+	for i := 0; i < len(protoTok3); i += 2 {
+		token = protoTok3[i+0]
+		if token == char {
+			token = protoTok3[i+1]
+			goto out
+		}
+	}
+
+out:
+	if token == 0 {
+		token = protoTok2[1] /* unknown char */
+	}
+	if protoDebug >= 3 {
+		__yyfmt__.Printf("lex %s(%d)\n", protoTokname(token), uint(char))
+	}
+	return char, token
+}
+
+func protoParse(protolex protoLexer) int {
+	return protoNewParser().Parse(protolex)
+}
+
+func (protorcvr *protoParserImpl) Parse(protolex protoLexer) int {
+	var proton int
+	var protoVAL protoSymType
+	var protoDollar []protoSymType
+	_ = protoDollar // silence set and not used
+	protoS := protorcvr.stack[:]
+
+	Nerrs := 0   /* number of errors */
+	Errflag := 0 /* error recovery flag */
+	protostate := 0
+	protorcvr.char = -1
+	prototoken := -1 // protorcvr.char translated into internal numbering
+	defer func() {
+		// Make sure we report no lookahead when not parsing.
+		protostate = -1
+		protorcvr.char = -1
+		prototoken = -1
+	}()
+	protop := -1
+	goto protostack
+
+ret0:
+	return 0
+
+ret1:
+	return 1
+
+protostack:
+	/* put a state and value onto the stack */
+	if protoDebug >= 4 {
+		__yyfmt__.Printf("char %v in %v\n", protoTokname(prototoken), protoStatname(protostate))
+	}
+
+	protop++
+	if protop >= len(protoS) {
+		nyys := make([]protoSymType, len(protoS)*2)
+		copy(nyys, protoS)
+		protoS = nyys
+	}
+	protoS[protop] = protoVAL
+	protoS[protop].yys = protostate
+
+protonewstate:
+	proton = protoPact[protostate]
+	if proton <= protoFlag {
+		goto protodefault /* simple state */
+	}
+	if protorcvr.char < 0 {
+		protorcvr.char, prototoken = protolex1(protolex, &protorcvr.lval)
+	}
+	proton += prototoken
+	if proton < 0 || proton >= protoLast {
+		goto protodefault
+	}
+	proton = protoAct[proton]
+	if protoChk[proton] == prototoken { /* valid shift */
+		protorcvr.char = -1
+		prototoken = -1
+		protoVAL = protorcvr.lval
+		protostate = proton
+		if Errflag > 0 {
+			Errflag--
+		}
+		goto protostack
+	}
+
+protodefault:
+	/* default state action */
+	proton = protoDef[protostate]
+	if proton == -2 {
+		if protorcvr.char < 0 {
+			protorcvr.char, prototoken = protolex1(protolex, &protorcvr.lval)
+		}
+
+		/* look through exception table */
+		xi := 0
+		for {
+			if protoExca[xi+0] == -1 && protoExca[xi+1] == protostate {
+				break
+			}
+			xi += 2
+		}
+		for xi += 2; ; xi += 2 {
+			proton = protoExca[xi+0]
+			if proton < 0 || proton == prototoken {
+				break
+			}
+		}
+		proton = protoExca[xi+1]
+		if proton < 0 {
+			goto ret0
+		}
+	}
+	if proton == 0 {
+		/* error ... attempt to resume parsing */
+		switch Errflag {
+		case 0: /* brand new error */
+			protolex.Error(protoErrorMessage(protostate, prototoken))
+			Nerrs++
+			if protoDebug >= 1 {
+				__yyfmt__.Printf("%s", protoStatname(protostate))
+				__yyfmt__.Printf(" saw %s\n", protoTokname(prototoken))
+			}
+			fallthrough
+
+		case 1, 2: /* incompletely recovered error ... try again */
+			Errflag = 3
+
+			/* find a state where "error" is a legal shift action */
+			for protop >= 0 {
+				proton = protoPact[protoS[protop].yys] + protoErrCode
+				if proton >= 0 && proton < protoLast {
+					protostate = protoAct[proton] /* simulate a shift of "error" */
+					if protoChk[protostate] == protoErrCode {
+						goto protostack
+					}
+				}
+
+				/* the current p has no shift on "error", pop stack */
+				if protoDebug >= 2 {
+					__yyfmt__.Printf("error recovery pops state %d\n", protoS[protop].yys)
+				}
+				protop--
+			}
+			/* there is no state on the stack with an error shift ... abort */
+			goto ret1
+
+		case 3: /* no shift yet; clobber input char */
+			if protoDebug >= 2 {
+				__yyfmt__.Printf("error recovery discards %s\n", protoTokname(prototoken))
+			}
+			if prototoken == protoEofCode {
+				goto ret1
+			}
+			protorcvr.char = -1
+			prototoken = -1
+			goto protonewstate /* try again in the same state */
+		}
+	}
+
+	/* reduction by production proton */
+	if protoDebug >= 2 {
+		__yyfmt__.Printf("reduce %v in:\n\t%v\n", proton, protoStatname(protostate))
+	}
+
+	protont := proton
+	protopt := protop
+	_ = protopt // guard against "declared and not used"
+
+	protop -= protoR2[proton]
+	// protop is now the index of $0. Perform the default action. Iff the
+	// reduced production is ε, $1 is possibly out of range.
+	if protop+1 >= len(protoS) {
+		nyys := make([]protoSymType, len(protoS)*2)
+		copy(nyys, protoS)
+		protoS = nyys
+	}
+	protoVAL = protoS[protop+1]
+
+	/* consult goto table to find next state */
+	proton = protoR1[proton]
+	protog := protoPgo[proton]
+	protoj := protog + protoS[protop].yys + 1
+
+	if protoj >= protoLast {
+		protostate = protoAct[protog]
+	} else {
+		protostate = protoAct[protoj]
+		if protoChk[protostate] != -proton {
+			protostate = protoAct[protog]
+		}
+	}
+	// dummy call; replaced with literal code
+	switch protont {
+
+	case 1:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:114
+		{
+			protoVAL.file = &fileNode{syntax: protoDollar[1].syn}
+			protoVAL.file.setRange(protoDollar[1].syn, protoDollar[1].syn)
+			protolex.(*protoLex).res = protoVAL.file
+		}
+	case 2:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:119
+		{
+			protoVAL.file = &fileNode{decls: protoDollar[1].fileDecls}
+			if len(protoDollar[1].fileDecls) > 0 {
+				protoVAL.file.setRange(protoDollar[1].fileDecls[0], protoDollar[1].fileDecls[len(protoDollar[1].fileDecls)-1])
+			}
+			protolex.(*protoLex).res = protoVAL.file
+		}
+	case 3:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:126
+		{
+			protoVAL.file = &fileNode{syntax: protoDollar[1].syn, decls: protoDollar[2].fileDecls}
+			var end node
+			if len(protoDollar[2].fileDecls) > 0 {
+				end = protoDollar[2].fileDecls[len(protoDollar[2].fileDecls)-1]
+			} else {
+				end = protoDollar[1].syn
+			}
+			protoVAL.file.setRange(protoDollar[1].syn, end)
+			protolex.(*protoLex).res = protoVAL.file
+		}
+	case 4:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:137
+		{
+		}
+	case 5:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:140
+		{
+			protoVAL.fileDecls = append(protoDollar[1].fileDecls, protoDollar[2].fileDecls...)
+		}
+	case 7:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:145
+		{
+			protoVAL.fileDecls = []*fileElement{{imp: protoDollar[1].imprt}}
+		}
+	case 8:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:148
+		{
+			protoVAL.fileDecls = []*fileElement{{pkg: protoDollar[1].pkg}}
+		}
+	case 9:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:151
+		{
+			protoVAL.fileDecls = []*fileElement{{option: protoDollar[1].opts[0]}}
+		}
+	case 10:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:154
+		{
+			protoVAL.fileDecls = []*fileElement{{message: protoDollar[1].msg}}
+		}
+	case 11:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:157
+		{
+			protoVAL.fileDecls = []*fileElement{{enum: protoDollar[1].en}}
+		}
+	case 12:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:160
+		{
+			protoVAL.fileDecls = []*fileElement{{extend: protoDollar[1].extend}}
+		}
+	case 13:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:163
+		{
+			protoVAL.fileDecls = []*fileElement{{service: protoDollar[1].svc}}
+		}
+	case 14:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:166
+		{
+			protoVAL.fileDecls = []*fileElement{{empty: protoDollar[1].b}}
+		}
+	case 15:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:170
+		{
+			if protoDollar[3].str.val != "proto2" && protoDollar[3].str.val != "proto3" {
+				lexError(protolex, protoDollar[3].str.start(), "syntax value must be 'proto2' or 'proto3'")
+			}
+			protoVAL.syn = &syntaxNode{syntax: protoDollar[3].str}
+			protoVAL.syn.setRange(protoDollar[1].id, protoDollar[4].b)
+		}
+	case 16:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:178
+		{
+			protoVAL.imprt = &importNode{name: protoDollar[2].str}
+			protoVAL.imprt.setRange(protoDollar[1].id, protoDollar[3].b)
+		}
+	case 17:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:182
+		{
+			protoVAL.imprt = &importNode{name: protoDollar[3].str, weak: true}
+			protoVAL.imprt.setRange(protoDollar[1].id, protoDollar[4].b)
+		}
+	case 18:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:186
+		{
+			protoVAL.imprt = &importNode{name: protoDollar[3].str, public: true}
+			protoVAL.imprt.setRange(protoDollar[1].id, protoDollar[4].b)
+		}
+	case 19:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:191
+		{
+			protoVAL.pkg = &packageNode{name: protoDollar[2].id}
+			protoVAL.pkg.setRange(protoDollar[1].id, protoDollar[3].b)
+		}
+	case 22:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:199
+		{
+			n := &optionNameNode{parts: protoDollar[2].optNm}
+			n.setRange(protoDollar[2].optNm[0], protoDollar[2].optNm[len(protoDollar[2].optNm)-1])
+			o := &optionNode{name: n, val: protoDollar[4].v}
+			o.setRange(protoDollar[1].id, protoDollar[5].b)
+			protoVAL.opts = []*optionNode{o}
+		}
+	case 23:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:207
+		{
+			protoVAL.optNm = toNameParts(protoDollar[1].id, 0)
+		}
+	case 24:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:210
+		{
+			p := &optionNamePartNode{text: protoDollar[2].id, isExtension: true}
+			p.setRange(protoDollar[1].b, protoDollar[3].b)
+			protoVAL.optNm = []*optionNamePartNode{p}
+		}
+	case 25:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:215
+		{
+			p := &optionNamePartNode{text: protoDollar[2].id, isExtension: true}
+			p.setRange(protoDollar[1].b, protoDollar[3].b)
+			ps := make([]*optionNamePartNode, 1, len(protoDollar[4].optNm)+1)
+			ps[0] = p
+			protoVAL.optNm = append(ps, protoDollar[4].optNm...)
+		}
+	case 27:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:224
+		{
+			protoVAL.optNm = append(protoDollar[1].optNm, protoDollar[2].optNm...)
+		}
+	case 28:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:228
+		{
+			protoVAL.optNm = toNameParts(protoDollar[1].id, 1 /* exclude leading dot */)
+		}
+	case 29:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:231
+		{
+			p := &optionNamePartNode{text: protoDollar[3].id, isExtension: true}
+			p.setRange(protoDollar[2].b, protoDollar[4].b)
+			protoVAL.optNm = []*optionNamePartNode{p}
+		}
+	case 32:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:240
+		{
+			protoVAL.v = protoDollar[1].str
+		}
+	case 33:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:243
+		{
+			protoVAL.v = protoDollar[1].ui
+		}
+	case 34:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:246
+		{
+			protoVAL.v = protoDollar[1].i
+		}
+	case 35:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:249
+		{
+			protoVAL.v = protoDollar[1].f
+		}
+	case 36:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:252
+		{
+			if protoDollar[1].id.val == "true" {
+				protoVAL.v = &boolLiteralNode{basicNode: protoDollar[1].id.basicNode, val: true}
+			} else if protoDollar[1].id.val == "false" {
+				protoVAL.v = &boolLiteralNode{basicNode: protoDollar[1].id.basicNode, val: false}
+			} else if protoDollar[1].id.val == "inf" {
+				f := &floatLiteralNode{val: math.Inf(1)}
+				f.setRange(protoDollar[1].id, protoDollar[1].id)
+				protoVAL.v = f
+			} else if protoDollar[1].id.val == "nan" {
+				f := &floatLiteralNode{val: math.NaN()}
+				f.setRange(protoDollar[1].id, protoDollar[1].id)
+				protoVAL.v = f
+			} else {
+				protoVAL.v = protoDollar[1].id
+			}
+		}
+	case 38:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:271
+		{
+			protoVAL.ui = protoDollar[2].ui
+		}
+	case 39:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:275
+		{
+			if protoDollar[2].ui.val > math.MaxInt64+1 {
+				lexError(protolex, protoDollar[2].ui.start(), fmt.Sprintf("numeric constant %d would underflow (allowed range is %d to %d)", protoDollar[2].ui.val, int64(math.MinInt64), int64(math.MaxInt64)))
+			}
+			protoVAL.i = &negativeIntLiteralNode{val: -int64(protoDollar[2].ui.val)}
+			protoVAL.i.setRange(protoDollar[1].b, protoDollar[2].ui)
+		}
+	case 41:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:284
+		{
+			protoVAL.f = &floatLiteralNode{val: -protoDollar[2].f.val}
+			protoVAL.f.setRange(protoDollar[1].b, protoDollar[2].f)
+		}
+	case 42:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:288
+		{
+			protoVAL.f = &floatLiteralNode{val: protoDollar[2].f.val}
+			protoVAL.f.setRange(protoDollar[1].b, protoDollar[2].f)
+		}
+	case 43:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:292
+		{
+			protoVAL.f = &floatLiteralNode{val: math.Inf(1)}
+			protoVAL.f.setRange(protoDollar[1].b, protoDollar[2].id)
+		}
+	case 44:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:296
+		{
+			protoVAL.f = &floatLiteralNode{val: math.Inf(-1)}
+			protoVAL.f.setRange(protoDollar[1].b, protoDollar[2].id)
+		}
+	case 46:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:302
+		{
+			protoVAL.str = &stringLiteralNode{val: protoDollar[1].str.val + protoDollar[2].str.val}
+			protoVAL.str.setRange(protoDollar[1].str, protoDollar[2].str)
+		}
+	case 47:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:307
+		{
+			a := &aggregateLiteralNode{elements: protoDollar[2].agg}
+			a.setRange(protoDollar[1].b, protoDollar[3].b)
+			protoVAL.v = a
+		}
+	case 49:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:314
+		{
+			protoVAL.agg = append(protoDollar[1].agg, protoDollar[2].agg...)
+		}
+	case 50:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:317
+		{
+			protoVAL.agg = nil
+		}
+	case 52:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:322
+		{
+			protoVAL.agg = protoDollar[1].agg
+		}
+	case 53:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:325
+		{
+			protoVAL.agg = protoDollar[1].agg
+		}
+	case 54:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:329
+		{
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: protoDollar[3].v}
+			a.setRange(protoDollar[1].aggName, protoDollar[3].v)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 55:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:334
+		{
+			s := &sliceLiteralNode{}
+			s.setRange(protoDollar[3].b, protoDollar[4].b)
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: s}
+			a.setRange(protoDollar[1].aggName, protoDollar[4].b)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 56:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:341
+		{
+			s := &sliceLiteralNode{elements: protoDollar[4].sl}
+			s.setRange(protoDollar[3].b, protoDollar[5].b)
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: s}
+			a.setRange(protoDollar[1].aggName, protoDollar[5].b)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 57:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:348
+		{
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: protoDollar[3].v}
+			a.setRange(protoDollar[1].aggName, protoDollar[3].v)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 58:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:353
+		{
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: protoDollar[2].v}
+			a.setRange(protoDollar[1].aggName, protoDollar[2].v)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 59:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:358
+		{
+			s := &aggregateLiteralNode{elements: protoDollar[4].agg}
+			s.setRange(protoDollar[3].b, protoDollar[5].b)
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: s}
+			a.setRange(protoDollar[1].aggName, protoDollar[5].b)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 60:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:365
+		{
+			s := &aggregateLiteralNode{elements: protoDollar[3].agg}
+			s.setRange(protoDollar[2].b, protoDollar[4].b)
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: s}
+			a.setRange(protoDollar[1].aggName, protoDollar[4].b)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 61:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:373
+		{
+			protoVAL.aggName = &aggregateNameNode{name: protoDollar[1].id}
+			protoVAL.aggName.setRange(protoDollar[1].id, protoDollar[1].id)
+		}
+	case 62:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:377
+		{
+			protoVAL.aggName = &aggregateNameNode{name: protoDollar[2].id, isExtension: true}
+			protoVAL.aggName.setRange(protoDollar[1].b, protoDollar[3].b)
+		}
+	case 63:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:382
+		{
+			protoVAL.sl = []valueNode{protoDollar[1].v}
+		}
+	case 64:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:385
+		{
+			protoVAL.sl = append(protoDollar[1].sl, protoDollar[3].v)
+		}
+	case 65:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:388
+		{
+			protoVAL.sl = append(protoDollar[1].sl, protoDollar[3].v)
+		}
+	case 66:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:391
+		{
+			s := &aggregateLiteralNode{elements: protoDollar[2].agg}
+			s.setRange(protoDollar[1].b, protoDollar[3].b)
+			protoVAL.sl = []valueNode{s}
+		}
+	case 67:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:396
+		{
+			s := &aggregateLiteralNode{elements: protoDollar[4].agg}
+			s.setRange(protoDollar[3].b, protoDollar[5].b)
+			protoVAL.sl = append(protoDollar[1].sl, s)
+		}
+	case 68:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:401
+		{
+			s := &aggregateLiteralNode{elements: protoDollar[4].agg}
+			s.setRange(protoDollar[3].b, protoDollar[5].b)
+			protoVAL.sl = append(protoDollar[1].sl, s)
+		}
+	case 71:
+		protoDollar = protoS[protopt-6 : protopt+1]
+//line proto.y:410
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode, required: true}
+			protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].id, name: protoDollar[3].id, tag: protoDollar[5].ui}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[6].b)
+		}
+	case 72:
+		protoDollar = protoS[protopt-6 : protopt+1]
+//line proto.y:416
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode}
+			protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].id, name: protoDollar[3].id, tag: protoDollar[5].ui}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[6].b)
+		}
+	case 73:
+		protoDollar = protoS[protopt-6 : protopt+1]
+//line proto.y:422
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode, repeated: true}
+			protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].id, name: protoDollar[3].id, tag: protoDollar[5].ui}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[6].b)
+		}
+	case 74:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:428
+		{
+			checkTag(protolex, protoDollar[4].ui.start(), protoDollar[4].ui.val)
+			protoVAL.fld = &fieldNode{fldType: protoDollar[1].id, name: protoDollar[2].id, tag: protoDollar[4].ui}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 75:
+		protoDollar = protoS[protopt-9 : protopt+1]
+//line proto.y:433
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode, required: true}
+			protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].id, name: protoDollar[3].id, tag: protoDollar[5].ui, options: protoDollar[7].opts}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[9].b)
+		}
+	case 76:
+		protoDollar = protoS[protopt-9 : protopt+1]
+//line proto.y:439
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode}
+			protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].id, name: protoDollar[3].id, tag: protoDollar[5].ui, options: protoDollar[7].opts}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[9].b)
+		}
+	case 77:
+		protoDollar = protoS[protopt-9 : protopt+1]
+//line proto.y:445
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode, repeated: true}
+			protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].id, name: protoDollar[3].id, tag: protoDollar[5].ui, options: protoDollar[7].opts}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[9].b)
+		}
+	case 78:
+		protoDollar = protoS[protopt-8 : protopt+1]
+//line proto.y:451
+		{
+			checkTag(protolex, protoDollar[4].ui.start(), protoDollar[4].ui.val)
+			protoVAL.fld = &fieldNode{fldType: protoDollar[1].id, name: protoDollar[2].id, tag: protoDollar[4].ui, options: protoDollar[6].opts}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[8].b)
+		}
+	case 79:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:457
+		{
+			protoVAL.opts = append(protoDollar[1].opts, protoDollar[3].opts...)
+		}
+	case 81:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:462
+		{
+			n := &optionNameNode{parts: protoDollar[1].optNm}
+			n.setRange(protoDollar[1].optNm[0], protoDollar[1].optNm[len(protoDollar[1].optNm)-1])
+			o := &optionNode{name: n, val: protoDollar[3].v}
+			o.setRange(protoDollar[1].optNm[0], protoDollar[3].v)
+			protoVAL.opts = []*optionNode{o}
+		}
+	case 82:
+		protoDollar = protoS[protopt-8 : protopt+1]
+//line proto.y:470
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			if !unicode.IsUpper(rune(protoDollar[3].id.val[0])) {
+				lexError(protolex, protoDollar[3].id.start(), fmt.Sprintf("group %s should have a name that starts with a capital letter", protoDollar[3].id.val))
+			}
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode, required: true}
+			protoVAL.grp = &groupNode{groupKeyword: protoDollar[2].id, label: lbl, name: protoDollar[3].id, tag: protoDollar[5].ui, decls: protoDollar[7].msgDecls}
+			protoVAL.grp.setRange(protoDollar[1].id, protoDollar[8].b)
+		}
+	case 83:
+		protoDollar = protoS[protopt-8 : protopt+1]
+//line proto.y:479
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			if !unicode.IsUpper(rune(protoDollar[3].id.val[0])) {
+				lexError(protolex, protoDollar[3].id.start(), fmt.Sprintf("group %s should have a name that starts with a capital letter", protoDollar[3].id.val))
+			}
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode}
+			protoVAL.grp = &groupNode{groupKeyword: protoDollar[2].id, label: lbl, name: protoDollar[3].id, tag: protoDollar[5].ui, decls: protoDollar[7].msgDecls}
+			protoVAL.grp.setRange(protoDollar[1].id, protoDollar[8].b)
+		}
+	case 84:
+		protoDollar = protoS[protopt-8 : protopt+1]
+//line proto.y:488
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			if !unicode.IsUpper(rune(protoDollar[3].id.val[0])) {
+				lexError(protolex, protoDollar[3].id.start(), fmt.Sprintf("group %s should have a name that starts with a capital letter", protoDollar[3].id.val))
+			}
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode, repeated: true}
+			protoVAL.grp = &groupNode{groupKeyword: protoDollar[2].id, label: lbl, name: protoDollar[3].id, tag: protoDollar[5].ui, decls: protoDollar[7].msgDecls}
+			protoVAL.grp.setRange(protoDollar[1].id, protoDollar[8].b)
+		}
+	case 85:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:498
+		{
+			c := 0
+			for _, el := range protoDollar[4].ooDecls {
+				if el.field != nil {
+					c++
+				}
+			}
+			if c == 0 {
+				lexError(protolex, protoDollar[1].id.start(), "oneof must contain at least one field")
+			}
+			protoVAL.oo = &oneOfNode{name: protoDollar[2].id, decls: protoDollar[4].ooDecls}
+			protoVAL.oo.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 86:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:512
+		{
+			protoVAL.ooDecls = append(protoDollar[1].ooDecls, protoDollar[2].ooDecls...)
+		}
+	case 88:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:516
+		{
+			protoVAL.ooDecls = nil
+		}
+	case 89:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:520
+		{
+			protoVAL.ooDecls = []*oneOfElement{{option: protoDollar[1].opts[0]}}
+		}
+	case 90:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:523
+		{
+			protoVAL.ooDecls = []*oneOfElement{{field: protoDollar[1].fld}}
+		}
+	case 91:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:526
+		{
+			protoVAL.ooDecls = []*oneOfElement{{empty: protoDollar[1].b}}
+		}
+	case 92:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:530
+		{
+			checkTag(protolex, protoDollar[4].ui.start(), protoDollar[4].ui.val)
+			protoVAL.fld = &fieldNode{fldType: protoDollar[1].id, name: protoDollar[2].id, tag: protoDollar[4].ui}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 93:
+		protoDollar = protoS[protopt-8 : protopt+1]
+//line proto.y:535
+		{
+			checkTag(protolex, protoDollar[4].ui.start(), protoDollar[4].ui.val)
+			protoVAL.fld = &fieldNode{fldType: protoDollar[1].id, name: protoDollar[2].id, tag: protoDollar[4].ui, options: protoDollar[6].opts}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[8].b)
+		}
+	case 94:
+		protoDollar = protoS[protopt-10 : protopt+1]
+//line proto.y:541
+		{
+			checkTag(protolex, protoDollar[9].ui.start(), protoDollar[9].ui.val)
+			protoVAL.mapFld = &mapFieldNode{mapKeyword: protoDollar[1].id, keyType: protoDollar[3].id, valueType: protoDollar[5].id, name: protoDollar[7].id, tag: protoDollar[9].ui}
+			protoVAL.mapFld.setRange(protoDollar[1].id, protoDollar[10].b)
+		}
+	case 95:
+		protoDollar = protoS[protopt-13 : protopt+1]
+//line proto.y:546
+		{
+			checkTag(protolex, protoDollar[9].ui.start(), protoDollar[9].ui.val)
+			protoVAL.mapFld = &mapFieldNode{mapKeyword: protoDollar[1].id, keyType: protoDollar[3].id, valueType: protoDollar[5].id, name: protoDollar[7].id, tag: protoDollar[9].ui, options: protoDollar[11].opts}
+			protoVAL.mapFld.setRange(protoDollar[1].id, protoDollar[13].b)
+		}
+	case 108:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:565
+		{
+			protoVAL.ext = &extensionRangeNode{ranges: protoDollar[2].rngs}
+			protoVAL.ext.setRange(protoDollar[1].id, protoDollar[3].b)
+		}
+	case 109:
+		protoDollar = protoS[protopt-6 : protopt+1]
+//line proto.y:569
+		{
+			protoVAL.ext = &extensionRangeNode{ranges: protoDollar[2].rngs, options: protoDollar[4].opts}
+			protoVAL.ext.setRange(protoDollar[1].id, protoDollar[6].b)
+		}
+	case 110:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:574
+		{
+			protoVAL.rngs = append(protoDollar[1].rngs, protoDollar[3].rngs...)
+		}
+	case 112:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:579
+		{
+			if protoDollar[1].ui.val > internal.MaxTag {
+				lexError(protolex, protoDollar[1].ui.start(), fmt.Sprintf("range includes out-of-range tag: %d (should be between 0 and %d)", protoDollar[1].ui.val, internal.MaxTag))
+			}
+			r := &rangeNode{stNode: protoDollar[1].ui, enNode: protoDollar[1].ui, st: int32(protoDollar[1].ui.val), en: int32(protoDollar[1].ui.val)}
+			r.setRange(protoDollar[1].ui, protoDollar[1].ui)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 113:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:587
+		{
+			if protoDollar[1].ui.val > internal.MaxTag {
+				lexError(protolex, protoDollar[1].ui.start(), fmt.Sprintf("range start is out-of-range tag: %d (should be between 0 and %d)", protoDollar[1].ui.val, internal.MaxTag))
+			}
+			if protoDollar[3].ui.val > internal.MaxTag {
+				lexError(protolex, protoDollar[3].ui.start(), fmt.Sprintf("range end is out-of-range tag: %d (should be between 0 and %d)", protoDollar[3].ui.val, internal.MaxTag))
+			}
+			if protoDollar[1].ui.val > protoDollar[3].ui.val {
+				lexError(protolex, protoDollar[1].ui.start(), fmt.Sprintf("range, %d to %d, is invalid: start must be <= end", protoDollar[1].ui.val, protoDollar[3].ui.val))
+			}
+			r := &rangeNode{stNode: protoDollar[1].ui, enNode: protoDollar[3].ui, st: int32(protoDollar[1].ui.val), en: int32(protoDollar[3].ui.val)}
+			r.setRange(protoDollar[1].ui, protoDollar[3].ui)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 114:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:601
+		{
+			if protoDollar[1].ui.val > internal.MaxTag {
+				lexError(protolex, protoDollar[1].ui.start(), fmt.Sprintf("range start is out-of-range tag: %d (should be between 0 and %d)", protoDollar[1].ui.val, internal.MaxTag))
+			}
+			r := &rangeNode{stNode: protoDollar[1].ui, enNode: protoDollar[3].id, st: int32(protoDollar[1].ui.val), en: internal.MaxTag}
+			r.setRange(protoDollar[1].ui, protoDollar[3].id)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 115:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:610
+		{
+			protoVAL.rngs = append(protoDollar[1].rngs, protoDollar[3].rngs...)
+		}
+	case 117:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:615
+		{
+			checkUint64InInt32Range(protolex, protoDollar[1].ui.start(), protoDollar[1].ui.val)
+			r := &rangeNode{stNode: protoDollar[1].ui, enNode: protoDollar[1].ui, st: int32(protoDollar[1].ui.val), en: int32(protoDollar[1].ui.val)}
+			r.setRange(protoDollar[1].ui, protoDollar[1].ui)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 118:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:621
+		{
+			checkInt64InInt32Range(protolex, protoDollar[1].i.start(), protoDollar[1].i.val)
+			r := &rangeNode{stNode: protoDollar[1].i, enNode: protoDollar[1].i, st: int32(protoDollar[1].i.val), en: int32(protoDollar[1].i.val)}
+			r.setRange(protoDollar[1].i, protoDollar[1].i)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 119:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:627
+		{
+			checkUint64InInt32Range(protolex, protoDollar[1].ui.start(), protoDollar[1].ui.val)
+			checkUint64InInt32Range(protolex, protoDollar[3].ui.start(), protoDollar[3].ui.val)
+			if protoDollar[1].ui.val > protoDollar[3].ui.val {
+				lexError(protolex, protoDollar[1].ui.start(), fmt.Sprintf("range, %d to %d, is invalid: start must be <= end", protoDollar[1].ui.val, protoDollar[3].ui.val))
+			}
+			r := &rangeNode{stNode: protoDollar[1].ui, enNode: protoDollar[3].ui, st: int32(protoDollar[1].ui.val), en: int32(protoDollar[3].ui.val)}
+			r.setRange(protoDollar[1].ui, protoDollar[3].ui)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 120:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:637
+		{
+			checkInt64InInt32Range(protolex, protoDollar[1].i.start(), protoDollar[1].i.val)
+			checkInt64InInt32Range(protolex, protoDollar[3].i.start(), protoDollar[3].i.val)
+			if protoDollar[1].i.val > protoDollar[3].i.val {
+				lexError(protolex, protoDollar[1].i.start(), fmt.Sprintf("range, %d to %d, is invalid: start must be <= end", protoDollar[1].i.val, protoDollar[3].i.val))
+			}
+			r := &rangeNode{stNode: protoDollar[1].i, enNode: protoDollar[3].i, st: int32(protoDollar[1].i.val), en: int32(protoDollar[3].i.val)}
+			r.setRange(protoDollar[1].i, protoDollar[3].i)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 121:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:647
+		{
+			checkInt64InInt32Range(protolex, protoDollar[1].i.start(), protoDollar[1].i.val)
+			checkUint64InInt32Range(protolex, protoDollar[3].ui.start(), protoDollar[3].ui.val)
+			r := &rangeNode{stNode: protoDollar[1].i, enNode: protoDollar[3].ui, st: int32(protoDollar[1].i.val), en: int32(protoDollar[3].ui.val)}
+			r.setRange(protoDollar[1].i, protoDollar[3].ui)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 122:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:654
+		{
+			checkUint64InInt32Range(protolex, protoDollar[1].ui.start(), protoDollar[1].ui.val)
+			r := &rangeNode{stNode: protoDollar[1].ui, enNode: protoDollar[3].id, st: int32(protoDollar[1].ui.val), en: math.MaxInt32}
+			r.setRange(protoDollar[1].ui, protoDollar[3].id)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 123:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:660
+		{
+			checkInt64InInt32Range(protolex, protoDollar[1].i.start(), protoDollar[1].i.val)
+			r := &rangeNode{stNode: protoDollar[1].i, enNode: protoDollar[3].id, st: int32(protoDollar[1].i.val), en: math.MaxInt32}
+			r.setRange(protoDollar[1].i, protoDollar[3].id)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 124:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:667
+		{
+			protoVAL.resvd = &reservedNode{ranges: protoDollar[2].rngs}
+			protoVAL.resvd.setRange(protoDollar[1].id, protoDollar[3].b)
+		}
+	case 126:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:673
+		{
+			protoVAL.resvd = &reservedNode{ranges: protoDollar[2].rngs}
+			protoVAL.resvd.setRange(protoDollar[1].id, protoDollar[3].b)
+		}
+	case 128:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:679
+		{
+			rsvd := map[string]struct{}{}
+			for _, n := range protoDollar[2].names {
+				if _, ok := rsvd[n.val]; ok {
+					lexError(protolex, n.start(), fmt.Sprintf("name %q is reserved multiple times", n.val))
+					break
+				}
+				rsvd[n.val] = struct{}{}
+			}
+			protoVAL.resvd = &reservedNode{names: protoDollar[2].names}
+			protoVAL.resvd.setRange(protoDollar[1].id, protoDollar[3].b)
+		}
+	case 129:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:692
+		{
+			protoVAL.names = append(protoDollar[1].names, protoDollar[3].str)
+		}
+	case 130:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:695
+		{
+			protoVAL.names = []*stringLiteralNode{protoDollar[1].str}
+		}
+	case 131:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:699
+		{
+			c := 0
+			for _, el := range protoDollar[4].enDecls {
+				if el.value != nil {
+					c++
+				}
+			}
+			if c == 0 {
+				lexError(protolex, protoDollar[1].id.start(), "enums must define at least one value")
+			}
+			protoVAL.en = &enumNode{name: protoDollar[2].id, decls: protoDollar[4].enDecls}
+			protoVAL.en.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 132:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:713
+		{
+			protoVAL.enDecls = append(protoDollar[1].enDecls, protoDollar[2].enDecls...)
+		}
+	case 134:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:717
+		{
+			protoVAL.enDecls = nil
+		}
+	case 135:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:721
+		{
+			protoVAL.enDecls = []*enumElement{{option: protoDollar[1].opts[0]}}
+		}
+	case 136:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:724
+		{
+			protoVAL.enDecls = []*enumElement{{value: protoDollar[1].env}}
+		}
+	case 137:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:727
+		{
+			protoVAL.enDecls = []*enumElement{{reserved: protoDollar[1].resvd}}
+		}
+	case 138:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:730
+		{
+			protoVAL.enDecls = []*enumElement{{empty: protoDollar[1].b}}
+		}
+	case 139:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:734
+		{
+			checkUint64InInt32Range(protolex, protoDollar[3].ui.start(), protoDollar[3].ui.val)
+			protoVAL.env = &enumValueNode{name: protoDollar[1].id, numberP: protoDollar[3].ui}
+			protoVAL.env.setRange(protoDollar[1].id, protoDollar[4].b)
+		}
+	case 140:
+		protoDollar = protoS[protopt-7 : protopt+1]
+//line proto.y:739
+		{
+			checkUint64InInt32Range(protolex, protoDollar[3].ui.start(), protoDollar[3].ui.val)
+			protoVAL.env = &enumValueNode{name: protoDollar[1].id, numberP: protoDollar[3].ui, options: protoDollar[5].opts}
+			protoVAL.env.setRange(protoDollar[1].id, protoDollar[7].b)
+		}
+	case 141:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:744
+		{
+			checkInt64InInt32Range(protolex, protoDollar[3].i.start(), protoDollar[3].i.val)
+			protoVAL.env = &enumValueNode{name: protoDollar[1].id, numberN: protoDollar[3].i}
+			protoVAL.env.setRange(protoDollar[1].id, protoDollar[4].b)
+		}
+	case 142:
+		protoDollar = protoS[protopt-7 : protopt+1]
+//line proto.y:749
+		{
+			checkInt64InInt32Range(protolex, protoDollar[3].i.start(), protoDollar[3].i.val)
+			protoVAL.env = &enumValueNode{name: protoDollar[1].id, numberN: protoDollar[3].i, options: protoDollar[5].opts}
+			protoVAL.env.setRange(protoDollar[1].id, protoDollar[7].b)
+		}
+	case 143:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:755
+		{
+			protoVAL.msg = &messageNode{name: protoDollar[2].id, decls: protoDollar[4].msgDecls}
+			protoVAL.msg.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 144:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:760
+		{
+			protoVAL.msgDecls = append(protoDollar[1].msgDecls, protoDollar[2].msgDecls...)
+		}
+	case 146:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:764
+		{
+			protoVAL.msgDecls = nil
+		}
+	case 147:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:768
+		{
+			protoVAL.msgDecls = []*messageElement{{field: protoDollar[1].fld}}
+		}
+	case 148:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:771
+		{
+			protoVAL.msgDecls = []*messageElement{{enum: protoDollar[1].en}}
+		}
+	case 149:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:774
+		{
+			protoVAL.msgDecls = []*messageElement{{nested: protoDollar[1].msg}}
+		}
+	case 150:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:777
+		{
+			protoVAL.msgDecls = []*messageElement{{extend: protoDollar[1].extend}}
+		}
+	case 151:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:780
+		{
+			protoVAL.msgDecls = []*messageElement{{extensionRange: protoDollar[1].ext}}
+		}
+	case 152:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:783
+		{
+			protoVAL.msgDecls = []*messageElement{{group: protoDollar[1].grp}}
+		}
+	case 153:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:786
+		{
+			protoVAL.msgDecls = []*messageElement{{option: protoDollar[1].opts[0]}}
+		}
+	case 154:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:789
+		{
+			protoVAL.msgDecls = []*messageElement{{oneOf: protoDollar[1].oo}}
+		}
+	case 155:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:792
+		{
+			protoVAL.msgDecls = []*messageElement{{mapField: protoDollar[1].mapFld}}
+		}
+	case 156:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:795
+		{
+			protoVAL.msgDecls = []*messageElement{{reserved: protoDollar[1].resvd}}
+		}
+	case 157:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:798
+		{
+			protoVAL.msgDecls = []*messageElement{{empty: protoDollar[1].b}}
+		}
+	case 158:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:802
+		{
+			c := 0
+			for _, el := range protoDollar[4].extDecls {
+				if el.field != nil || el.group != nil {
+					c++
+				}
+			}
+			if c == 0 {
+				lexError(protolex, protoDollar[1].id.start(), "extend sections must define at least one extension")
+			}
+			protoVAL.extend = &extendNode{extendee: protoDollar[2].id, decls: protoDollar[4].extDecls}
+			protoVAL.extend.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 159:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:816
+		{
+			protoVAL.extDecls = append(protoDollar[1].extDecls, protoDollar[2].extDecls...)
+		}
+	case 161:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:820
+		{
+			protoVAL.extDecls = nil
+		}
+	case 162:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:824
+		{
+			protoVAL.extDecls = []*extendElement{{field: protoDollar[1].fld}}
+		}
+	case 163:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:827
+		{
+			protoVAL.extDecls = []*extendElement{{group: protoDollar[1].grp}}
+		}
+	case 164:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:830
+		{
+			protoVAL.extDecls = []*extendElement{{empty: protoDollar[1].b}}
+		}
+	case 165:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:834
+		{
+			protoVAL.svc = &serviceNode{name: protoDollar[2].id, decls: protoDollar[4].svcDecls}
+			protoVAL.svc.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 166:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:839
+		{
+			protoVAL.svcDecls = append(protoDollar[1].svcDecls, protoDollar[2].svcDecls...)
+		}
+	case 168:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:843
+		{
+			protoVAL.svcDecls = nil
+		}
+	case 169:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:850
+		{
+			protoVAL.svcDecls = []*serviceElement{{option: protoDollar[1].opts[0]}}
+		}
+	case 170:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:853
+		{
+			protoVAL.svcDecls = []*serviceElement{{rpc: protoDollar[1].mtd}}
+		}
+	case 171:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:856
+		{
+			protoVAL.svcDecls = []*serviceElement{{empty: protoDollar[1].b}}
+		}
+	case 172:
+		protoDollar = protoS[protopt-10 : protopt+1]
+//line proto.y:860
+		{
+			protoVAL.mtd = &methodNode{name: protoDollar[2].id, input: protoDollar[4].rpcType, output: protoDollar[8].rpcType}
+			protoVAL.mtd.setRange(protoDollar[1].id, protoDollar[10].b)
+		}
+	case 173:
+		protoDollar = protoS[protopt-12 : protopt+1]
+//line proto.y:864
+		{
+			protoVAL.mtd = &methodNode{name: protoDollar[2].id, input: protoDollar[4].rpcType, output: protoDollar[8].rpcType, options: protoDollar[11].opts}
+			protoVAL.mtd.setRange(protoDollar[1].id, protoDollar[12].b)
+		}
+	case 174:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:869
+		{
+			protoVAL.rpcType = &rpcTypeNode{msgType: protoDollar[2].id, streamKeyword: protoDollar[1].id}
+			protoVAL.rpcType.setRange(protoDollar[1].id, protoDollar[2].id)
+		}
+	case 175:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:873
+		{
+			protoVAL.rpcType = &rpcTypeNode{msgType: protoDollar[1].id}
+			protoVAL.rpcType.setRange(protoDollar[1].id, protoDollar[1].id)
+		}
+	case 176:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:878
+		{
+			protoVAL.opts = append(protoDollar[1].opts, protoDollar[2].opts...)
+		}
+	case 178:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:882
+		{
+			protoVAL.opts = []*optionNode{}
+		}
+	case 179:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:886
+		{
+			protoVAL.opts = protoDollar[1].opts
+		}
+	case 180:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:889
+		{
+			protoVAL.opts = []*optionNode{}
+		}
+	}
+	goto protostack /* stack new state and value */
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/source_code_info.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/source_code_info.go
new file mode 100644
index 0000000..d0a61c2
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/source_code_info.go
@@ -0,0 +1,612 @@
+package protoparse
+
+import (
+	"bytes"
+	"reflect"
+	"sort"
+	"strings"
+
+	"github.com/golang/protobuf/proto"
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+
+	"github.com/jhump/protoreflect/desc/internal"
+)
+
+// generateSourceCodeInfo computes the SourceCodeInfo (element spans plus
+// attached comments) for the parsed file from the AST retained during
+// parsing. It returns nil when no AST is available, which is the case for
+// files loaded from pre-built descriptors instead of from source.
+func (r *parseResult) generateSourceCodeInfo() *dpb.SourceCodeInfo {
+	if r.nodes == nil {
+		// skip files that do not have AST info (these will be files
+		// that came from well-known descriptors, instead of from source)
+		return nil
+	}
+
+	sci := sourceCodeInfo{commentsUsed: map[*comment]struct{}{}}
+	path := make([]int32, 0, 10)
+
+	// Passing append(path, ...) results everywhere below is safe even
+	// though the appends share path's backing array: newLoc copies the
+	// slice it is given before storing it.
+	fn := r.getFileNode(r.fd).(*fileNode)
+	if fn.syntax != nil {
+		sci.newLoc(fn.syntax, append(path, internal.File_syntaxTag))
+	}
+	if fn.pkg != nil {
+		sci.newLoc(fn.pkg, append(path, internal.File_packageTag))
+	}
+	for i, imp := range fn.imports {
+		sci.newLoc(imp, append(path, internal.File_dependencyTag, int32(i)))
+	}
+
+	// file options
+	r.generateSourceCodeInfoForOptions(&sci, fn.decls, func(n interface{}) *optionNode {
+		return n.(*fileElement).option
+	}, r.fd.Options.GetUninterpretedOption(), append(path, internal.File_optionsTag))
+
+	// message types
+	for i, msg := range r.fd.GetMessageType() {
+		r.generateSourceCodeInfoForMessage(&sci, msg, append(path, internal.File_messagesTag, int32(i)))
+	}
+
+	// enum types
+	for i, enum := range r.fd.GetEnumType() {
+		r.generateSourceCodeInfoForEnum(&sci, enum, append(path, internal.File_enumsTag, int32(i)))
+	}
+
+	// extension fields
+	for i, ext := range r.fd.GetExtension() {
+		r.generateSourceCodeInfoForField(&sci, ext, append(path, internal.File_extensionsTag, int32(i)))
+	}
+
+	// services and methods
+	for i, svc := range r.fd.GetService() {
+		n := r.getServiceNode(svc).(*serviceNode)
+		svcPath := append(path, internal.File_servicesTag, int32(i))
+		sci.newLoc(n, svcPath)
+		sci.newLoc(n.name, append(svcPath, internal.Service_nameTag))
+
+		// service options
+		r.generateSourceCodeInfoForOptions(&sci, n.decls, func(n interface{}) *optionNode {
+			return n.(*serviceElement).option
+		}, svc.Options.GetUninterpretedOption(), append(svcPath, internal.Service_optionsTag))
+
+		// methods
+		for j, mtd := range svc.GetMethod() {
+			mn := r.getMethodNode(mtd).(*methodNode)
+			mtdPath := append(svcPath, internal.Service_methodsTag, int32(j))
+			sci.newLoc(mn, mtdPath)
+			sci.newLoc(mn.name, append(mtdPath, internal.Method_nameTag))
+
+			sci.newLoc(mn.input.msgType, append(mtdPath, internal.Method_inputTag))
+			if mn.input.streamKeyword != nil {
+				sci.newLoc(mn.input.streamKeyword, append(mtdPath, internal.Method_inputStreamTag))
+			}
+			sci.newLoc(mn.output.msgType, append(mtdPath, internal.Method_outputTag))
+			if mn.output.streamKeyword != nil {
+				sci.newLoc(mn.output.streamKeyword, append(mtdPath, internal.Method_outputStreamTag))
+			}
+
+			// method options
+			r.generateSourceCodeInfoForOptions(&sci, mn.options, func(n interface{}) *optionNode {
+				return n.(*optionNode)
+			}, mtd.Options.GetUninterpretedOption(), append(mtdPath, internal.Method_optionsTag))
+		}
+	}
+	return &dpb.SourceCodeInfo{Location: sci.generateLocs()}
+}
+
+// generateSourceCodeInfoForOptions emits locations for the options of one
+// element. elements is a slice (accessed reflectively) whose items yield an
+// option AST node via extractor; options that were successfully interpreted
+// have their descriptor paths recorded in r.interpretedOptions, while
+// uninterp holds the options that remain uninterpreted. path is the path of
+// the enclosing element's options field.
+func (r *parseResult) generateSourceCodeInfoForOptions(sci *sourceCodeInfo, elements interface{}, extractor func(interface{}) *optionNode, uninterp []*dpb.UninterpretedOption, path []int32) {
+	// Known options are option node elements that have a corresponding
+	// path in r.interpretedOptions. We'll do those first.
+	rv := reflect.ValueOf(elements)
+	for i := 0; i < rv.Len(); i++ {
+		on := extractor(rv.Index(i).Interface())
+		if on == nil {
+			continue
+		}
+		optPath := r.interpretedOptions[on]
+		if len(optPath) > 0 {
+			p := path
+			if optPath[0] == -1 {
+				// used by "default" and "json_name" field pseudo-options
+				// to attribute path to parent element (since those are
+				// stored directly on the descriptor, not its options)
+				p = make([]int32, len(path)-1)
+				copy(p, path)
+				optPath = optPath[1:]
+			}
+			sci.newLoc(on, append(p, optPath...))
+		}
+	}
+
+	// Now uninterpreted options
+	for i, uo := range uninterp {
+		optPath := append(path, internal.UninterpretedOptionsTag, int32(i))
+		on := r.getOptionNode(uo).(*optionNode)
+		sci.newLoc(on, optPath)
+
+		var valTag int32
+		switch {
+		case uo.IdentifierValue != nil:
+			valTag = internal.Uninterpreted_identTag
+		case uo.PositiveIntValue != nil:
+			valTag = internal.Uninterpreted_posIntTag
+		case uo.NegativeIntValue != nil:
+			valTag = internal.Uninterpreted_negIntTag
+		case uo.DoubleValue != nil:
+			valTag = internal.Uninterpreted_doubleTag
+		case uo.StringValue != nil:
+			valTag = internal.Uninterpreted_stringTag
+		case uo.AggregateValue != nil:
+			valTag = internal.Uninterpreted_aggregateTag
+		}
+		// valTag stays 0 only when the option carries none of the value
+		// fields; in that case no location is emitted for the value.
+		if valTag != 0 {
+			sci.newLoc(on.val, append(optPath, valTag))
+		}
+
+		for j, n := range uo.Name {
+			optNmPath := append(optPath, internal.Uninterpreted_nameTag, int32(j))
+			nn := r.getOptionNamePartNode(n).(*optionNamePartNode)
+			sci.newLoc(nn, optNmPath)
+			sci.newLoc(nn.text, append(optNmPath, internal.UninterpretedName_nameTag))
+		}
+	}
+}
+
+// generateSourceCodeInfoForMessage emits locations for a message and,
+// recursively, for everything it contains: fields, one-ofs, nested messages
+// and enums, extensions, extension ranges, and reserved ranges/names.
+// Messages whose AST node is neither a messageNode nor a groupNode (i.e.
+// synthesized map entries) only get a location for the message itself.
+func (r *parseResult) generateSourceCodeInfoForMessage(sci *sourceCodeInfo, msg *dpb.DescriptorProto, path []int32) {
+	n := r.getMessageNode(msg)
+	sci.newLoc(n, path)
+
+	var decls []*messageElement
+	var resvdNames []*stringLiteralNode
+	switch n := n.(type) {
+	case *messageNode:
+		decls = n.decls
+		resvdNames = n.reserved
+	case *groupNode:
+		decls = n.decls
+		resvdNames = n.reserved
+	}
+	if decls == nil {
+		// map entry so nothing else to do
+		return
+	}
+
+	sci.newLoc(n.messageName(), append(path, internal.Message_nameTag))
+
+	// message options
+	r.generateSourceCodeInfoForOptions(sci, decls, func(n interface{}) *optionNode {
+		return n.(*messageElement).option
+	}, msg.Options.GetUninterpretedOption(), append(path, internal.Message_optionsTag))
+
+	// fields
+	for i, fld := range msg.GetField() {
+		r.generateSourceCodeInfoForField(sci, fld, append(path, internal.Message_fieldsTag, int32(i)))
+	}
+
+	// one-ofs
+	for i, ood := range msg.GetOneofDecl() {
+		oon := r.getOneOfNode(ood).(*oneOfNode)
+		ooPath := append(path, internal.Message_oneOfsTag, int32(i))
+		sci.newLoc(oon, ooPath)
+		sci.newLoc(oon.name, append(ooPath, internal.OneOf_nameTag))
+
+		// one-of options
+		r.generateSourceCodeInfoForOptions(sci, oon.decls, func(n interface{}) *optionNode {
+			return n.(*oneOfElement).option
+		}, ood.Options.GetUninterpretedOption(), append(ooPath, internal.OneOf_optionsTag))
+	}
+
+	// nested messages
+	for i, nm := range msg.GetNestedType() {
+		r.generateSourceCodeInfoForMessage(sci, nm, append(path, internal.Message_nestedMessagesTag, int32(i)))
+	}
+
+	// nested enums
+	for i, enum := range msg.GetEnumType() {
+		r.generateSourceCodeInfoForEnum(sci, enum, append(path, internal.Message_enumsTag, int32(i)))
+	}
+
+	// nested extensions
+	for i, ext := range msg.GetExtension() {
+		r.generateSourceCodeInfoForField(sci, ext, append(path, internal.Message_extensionsTag, int32(i)))
+	}
+
+	// extension ranges
+	for i, er := range msg.ExtensionRange {
+		rangePath := append(path, internal.Message_extensionRangeTag, int32(i))
+		rn := r.getExtensionRangeNode(er).(*rangeNode)
+		sci.newLoc(rn, rangePath)
+		sci.newLoc(rn.stNode, append(rangePath, internal.ExtensionRange_startTag))
+		if rn.stNode != rn.enNode {
+			sci.newLoc(rn.enNode, append(rangePath, internal.ExtensionRange_endTag))
+		}
+		// now we have to find the extension decl and options that correspond to this range :(
+		for _, d := range decls {
+			found := false
+			if d.extensionRange != nil {
+				// NOTE(review): the range variable r below shadows the
+				// receiver, but only inside this inner loop; the receiver
+				// is what is used in the "if found" branch.
+				for _, r := range d.extensionRange.ranges {
+					if rn == r {
+						found = true
+						break
+					}
+				}
+			}
+			if found {
+				r.generateSourceCodeInfoForOptions(sci, d.extensionRange.options, func(n interface{}) *optionNode {
+					return n.(*optionNode)
+				}, er.Options.GetUninterpretedOption(), append(rangePath, internal.ExtensionRange_optionsTag))
+				break
+			}
+		}
+	}
+
+	// reserved ranges
+	for i, rr := range msg.ReservedRange {
+		rangePath := append(path, internal.Message_reservedRangeTag, int32(i))
+		rn := r.getMessageReservedRangeNode(rr).(*rangeNode)
+		sci.newLoc(rn, rangePath)
+		sci.newLoc(rn.stNode, append(rangePath, internal.ReservedRange_startTag))
+		if rn.stNode != rn.enNode {
+			sci.newLoc(rn.enNode, append(rangePath, internal.ReservedRange_endTag))
+		}
+	}
+
+	// reserved names
+	for i, n := range resvdNames {
+		sci.newLoc(n, append(path, internal.Message_reservedNameTag, int32(i)))
+	}
+}
+
+// generateSourceCodeInfoForEnum emits locations for an enum and all of its
+// contents: values (with their options), reserved ranges, and reserved
+// names.
+func (r *parseResult) generateSourceCodeInfoForEnum(sci *sourceCodeInfo, enum *dpb.EnumDescriptorProto, path []int32) {
+	n := r.getEnumNode(enum).(*enumNode)
+	sci.newLoc(n, path)
+	sci.newLoc(n.name, append(path, internal.Enum_nameTag))
+
+	// enum options
+	r.generateSourceCodeInfoForOptions(sci, n.decls, func(n interface{}) *optionNode {
+		return n.(*enumElement).option
+	}, enum.Options.GetUninterpretedOption(), append(path, internal.Enum_optionsTag))
+
+	// enum values
+	for j, ev := range enum.GetValue() {
+		evn := r.getEnumValueNode(ev).(*enumValueNode)
+		evPath := append(path, internal.Enum_valuesTag, int32(j))
+		sci.newLoc(evn, evPath)
+		sci.newLoc(evn.name, append(evPath, internal.EnumVal_nameTag))
+		sci.newLoc(evn.getNumber(), append(evPath, internal.EnumVal_numberTag))
+
+		// enum value options
+		r.generateSourceCodeInfoForOptions(sci, evn.options, func(n interface{}) *optionNode {
+			return n.(*optionNode)
+		}, ev.Options.GetUninterpretedOption(), append(evPath, internal.EnumVal_optionsTag))
+	}
+
+	// reserved ranges
+	for i, rr := range enum.GetReservedRange() {
+		rangePath := append(path, internal.Enum_reservedRangeTag, int32(i))
+		rn := r.getEnumReservedRangeNode(rr).(*rangeNode)
+		sci.newLoc(rn, rangePath)
+		sci.newLoc(rn.stNode, append(rangePath, internal.ReservedRange_startTag))
+		// no separate end location when one node serves as both endpoints
+		if rn.stNode != rn.enNode {
+			sci.newLoc(rn.enNode, append(rangePath, internal.ReservedRange_endTag))
+		}
+	}
+
+	// reserved names
+	for i, rn := range n.reserved {
+		sci.newLoc(rn, append(path, internal.Enum_reservedNameTag, int32(i)))
+	}
+}
+
+// generateSourceCodeInfoForField emits locations for a field AST node: a
+// normal field, map field, group, or extension. No separate name/type
+// locations are recorded for groups, and synthetic map-entry fields are
+// skipped entirely.
+func (r *parseResult) generateSourceCodeInfoForField(sci *sourceCodeInfo, fld *dpb.FieldDescriptorProto, path []int32) {
+	n := r.getFieldNode(fld)
+
+	isGroup := false
+	var opts []*optionNode
+	var extendee *extendNode
+	switch n := n.(type) {
+	case *fieldNode:
+		opts = n.options
+		extendee = n.extendee
+	case *mapFieldNode:
+		opts = n.options
+	case *groupNode:
+		isGroup = true
+		extendee = n.extendee
+	case *syntheticMapField:
+		// shouldn't get here since we don't recurse into fields from a mapNode
+		// in generateSourceCodeInfoForMessage... but just in case
+		return
+	}
+
+	sci.newLoc(n, path)
+	if !isGroup {
+		sci.newLoc(n.fieldName(), append(path, internal.Field_nameTag))
+		sci.newLoc(n.fieldType(), append(path, internal.Field_typeTag))
+	}
+	// the label token may be absent (fieldLabel() returns nil), in which
+	// case no label location is emitted
+	if n.fieldLabel() != nil {
+		sci.newLoc(n.fieldLabel(), append(path, internal.Field_labelTag))
+	}
+	sci.newLoc(n.fieldTag(), append(path, internal.Field_numberTag))
+	if extendee != nil {
+		sci.newLoc(extendee.extendee, append(path, internal.Field_extendeeTag))
+	}
+
+	r.generateSourceCodeInfoForOptions(sci, opts, func(n interface{}) *optionNode {
+		return n.(*optionNode)
+	}, fld.Options.GetUninterpretedOption(), append(path, internal.Field_optionsTag))
+}
+
+// sourceCodeInfo accumulates the source locations generated for a file.
+type sourceCodeInfo struct {
+	// locs is the flat list of leaf locations recorded so far.
+	locs         []*dpb.SourceCodeInfo_Location
+	// commentsUsed tracks comment groups (keyed by the group's first
+	// comment) that have already been attributed to a location, so the
+	// same comment is never attached to two locations.
+	commentsUsed map[*comment]struct{}
+}
+
+// newLoc records a location for AST node n at the given path, attaching any
+// leading/trailing comments on the node that have not already been claimed
+// by another location. Leading comments are split into "detached" groups;
+// the final group is promoted to the location's leading comment when it
+// ends on the line just above the node (or later). The path is copied, so
+// callers may keep appending to their own slice afterwards.
+func (sci *sourceCodeInfo) newLoc(n node, path []int32) {
+	leadingComments := n.leadingComments()
+	trailingComments := n.trailingComments()
+	if sci.commentUsed(leadingComments) {
+		leadingComments = nil
+	}
+	if sci.commentUsed(trailingComments) {
+		trailingComments = nil
+	}
+	detached := groupComments(leadingComments)
+	trail := combineComments(trailingComments)
+	var lead *string
+	if len(leadingComments) > 0 && leadingComments[len(leadingComments)-1].end.Line >= n.start().Line-1 {
+		// the last comment group is adjacent to the node, so it is the
+		// leading comment rather than a detached comment
+		lead = proto.String(detached[len(detached)-1])
+		detached = detached[:len(detached)-1]
+	}
+	dup := make([]int32, len(path))
+	copy(dup, path)
+	var span []int32
+	// spans are zero-based (per the SourceCodeInfo convention), hence the
+	// -1 adjustments; single-line spans use the compact 3-element
+	// [line, startCol, endCol] form, multi-line spans use 4 elements
+	if n.start().Line == n.end().Line {
+		span = []int32{int32(n.start().Line) - 1, int32(n.start().Col) - 1, int32(n.end().Col) - 1}
+	} else {
+		span = []int32{int32(n.start().Line) - 1, int32(n.start().Col) - 1, int32(n.end().Line) - 1, int32(n.end().Col) - 1}
+	}
+	sci.locs = append(sci.locs, &dpb.SourceCodeInfo_Location{
+		LeadingDetachedComments: detached,
+		LeadingComments:         lead,
+		TrailingComments:        trail,
+		Path:                    dup,
+		Span:                    span,
+	})
+}
+
+// commentUsed reports whether the given comment group has already been
+// attributed to a location. The group's first comment serves as the key;
+// on a miss the group is recorded as used so later callers will see it.
+func (sci *sourceCodeInfo) commentUsed(c []*comment) bool {
+	if len(c) == 0 {
+		return false
+	}
+	key := c[0]
+	_, seen := sci.commentsUsed[key]
+	if !seen {
+		sci.commentsUsed[key] = struct{}{}
+	}
+	return seen
+}
+
+// groupComments partitions a run of comments into "detached" comment
+// groups. A new group begins whenever a block comment is encountered, the
+// comment style flips between line and block form, or there is a blank
+// line between consecutive comments. Each group is rendered into a single
+// string via combineComments. Returns nil for an empty input.
+func groupComments(comments []*comment) []string {
+	if len(comments) == 0 {
+		return nil
+	}
+
+	var groups []string
+	groupStart := 0
+	prevLine := comments[0].end.Line
+	prevSingle := comments[0].text[:2] == "//"
+	for i := 1; i < len(comments); i++ {
+		cur := comments[i]
+		curSingle := strings.HasPrefix(cur.text, "//")
+		if !curSingle || curSingle != prevSingle || cur.start.Line > prevLine+1 {
+			// new group!
+			groups = append(groups, *combineComments(comments[groupStart:i]))
+			groupStart = i
+		}
+		prevSingle = curSingle
+		prevLine = cur.end.Line
+	}
+	// don't forget last group
+	groups = append(groups, *combineComments(comments[groupStart:]))
+
+	return groups
+}
+
+// combineComments renders a group of comment tokens as a single string,
+// joining successive comments (and the interior lines of block comments)
+// with newlines. Line comments contribute their text after the "//"
+// marker; block comments contribute each interior line with any leading
+// whitespace-then-'*' gutter stripped. Returns nil for an empty group.
+func combineComments(comments []*comment) *string {
+	if len(comments) == 0 {
+		return nil
+	}
+	first := true
+	var buf bytes.Buffer
+	for _, c := range comments {
+		if first {
+			first = false
+		} else {
+			buf.WriteByte('\n')
+		}
+		if c.text[:2] == "//" {
+			buf.WriteString(c.text[2:])
+		} else {
+			// block comment: strip the enclosing "/*" and "*/" markers and
+			// process each interior line separately
+			lines := strings.Split(c.text[2:len(c.text)-2], "\n")
+			first := true
+			for _, l := range lines {
+				if first {
+					first = false
+				} else {
+					buf.WriteByte('\n')
+				}
+
+				// strip a prefix of whitespace followed by '*'
+				j := 0
+				for j < len(l) {
+					if l[j] != ' ' && l[j] != '\t' {
+						break
+					}
+					j++
+				}
+				if j == len(l) {
+					l = ""
+				} else if l[j] == '*' {
+					l = l[j+1:]
+				} else if j > 0 {
+					// no '*' gutter: collapse the leading whitespace run
+					// into a single space
+					l = " " + l[j:]
+				}
+
+				buf.WriteString(l)
+			}
+		}
+	}
+	return proto.String(buf.String())
+}
+
+// generateLocs returns the final, sorted list of locations for the file,
+// adding synthesized locations for every ancestor path of the recorded
+// leaves.
+func (sci *sourceCodeInfo) generateLocs() []*dpb.SourceCodeInfo_Location {
+	// generate intermediate locations: paths between root (inclusive) and the
+	// leaf locations already created; these will not have comments but will
+	// have an aggregate span that runs from min(start pos) to max(end pos)
+	// for all descendant paths.
+
+	if len(sci.locs) == 0 {
+		// nothing to generate
+		return nil
+	}
+
+	var root locTrie
+	for _, loc := range sci.locs {
+		root.add(loc.Path, loc)
+	}
+	root.fillIn()
+	locs := make([]*dpb.SourceCodeInfo_Location, 0, root.countLocs())
+	root.aggregate(&locs)
+	// finally, sort the resulting slice by location
+	sort.Slice(locs, func(i, j int) bool {
+		startI, endI := getSpanPositions(locs[i].Span)
+		startJ, endJ := getSpanPositions(locs[j].Span)
+		cmp := compareSlice(startI, startJ)
+		if cmp == 0 {
+			// if start position is the same, sort by end position _decreasing_
+			// (so enclosing locations will appear before leaves)
+			cmp = -compareSlice(endI, endJ)
+			if cmp == 0 {
+				// start and end position are the same? so break ties using path
+				cmp = compareSlice(locs[i].Path, locs[j].Path)
+			}
+		}
+		return cmp < 0
+	})
+	return locs
+}
+
+// locTrie is a trie keyed by the elements of location paths; it is used to
+// synthesize ancestor locations (fillIn) and to count and collect all
+// locations (countLocs, aggregate).
+type locTrie struct {
+	// children maps the next path element to the corresponding subtree.
+	children map[int32]*locTrie
+	// loc is the location stored at this exact path: nil until set by add,
+	// or synthesized later by fillIn for interior nodes.
+	loc      *dpb.SourceCodeInfo_Location
+}
+
+// add stores loc in the trie at the given path, creating intermediate
+// nodes as needed.
+func (t *locTrie) add(path []int32, loc *dpb.SourceCodeInfo_Location) {
+	cur := t
+	for _, p := range path {
+		next := cur.children[p]
+		if next == nil {
+			if cur.children == nil {
+				cur.children = map[int32]*locTrie{}
+			}
+			next = &locTrie{}
+			cur.children[p] = next
+		}
+		cur = next
+	}
+	cur.loc = loc
+}
+
+// fillIn synthesizes a location for every trie node that does not already
+// have one: its path is the children's common path prefix, and its span
+// runs from the minimum child start to the maximum child end. It must be
+// called only after all leaves have been added; a node without a location
+// is assumed to have at least one child (guaranteed by how add builds the
+// trie).
+func (t *locTrie) fillIn() {
+	var path []int32
+	var start, end []int32
+	for _, child := range t.children {
+		// recurse
+		child.fillIn()
+		if t.loc == nil {
+			// maintain min(start) and max(end) so we can
+			// populate t.loc below
+			childStart, childEnd := getSpanPositions(child.loc.Span)
+
+			if start == nil {
+				if path == nil {
+					// drop the last path element to get this node's path;
+					// note this aliases the child's Path backing array
+					path = child.loc.Path[:len(child.loc.Path)-1]
+				}
+				start = childStart
+				end = childEnd
+			} else {
+				if compareSlice(childStart, start) < 0 {
+					start = childStart
+				}
+				if compareSlice(childEnd, end) > 0 {
+					end = childEnd
+				}
+			}
+		}
+	}
+
+	if t.loc == nil {
+		var span []int32
+		// we don't use append below because we want a new slice
+		// that doesn't share underlying buffer with spans from
+		// any other location
+		if start[0] == end[0] {
+			span = []int32{start[0], start[1], end[1]}
+		} else {
+			span = []int32{start[0], start[1], end[0], end[1]}
+		}
+		t.loc = &dpb.SourceCodeInfo_Location{
+			Path: path,
+			Span: span,
+		}
+	}
+}
+
+// countLocs returns the number of locations stored in this subtree.
+func (t *locTrie) countLocs() int {
+	total := 0
+	stack := []*locTrie{t}
+	for len(stack) > 0 {
+		n := stack[len(stack)-1]
+		stack = stack[:len(stack)-1]
+		if n.loc != nil {
+			total++
+		}
+		for _, child := range n.children {
+			stack = append(stack, child)
+		}
+	}
+	return total
+}
+
+// aggregate appends every location stored in this subtree to dest,
+// visiting each node before its children (child order is unspecified
+// since children live in a map; callers sort the result afterwards).
+func (t *locTrie) aggregate(dest *[]*dpb.SourceCodeInfo_Location) {
+	if loc := t.loc; loc != nil {
+		*dest = append(*dest, loc)
+	}
+	for _, ch := range t.children {
+		ch.aggregate(dest)
+	}
+}
+
+// getSpanPositions splits a source span into its start and end positions.
+// Spans are encoded either as [line, startCol, endCol] (single-line) or as
+// [startLine, startCol, endLine, endCol].
+func getSpanPositions(span []int32) (start, end []int32) {
+	start = span[:2]
+	if len(span) == 3 {
+		// single-line span: the end shares the start's line
+		return start, []int32{span[0], span[2]}
+	}
+	return start, span[2:]
+}
+
+// compareSlice lexicographically compares two int32 slices, returning -1,
+// 0, or 1 as a is less than, equal to, or greater than b. A strict prefix
+// compares less than the longer slice.
+func compareSlice(a, b []int32) int {
+	n := len(a)
+	if len(b) < n {
+		n = len(b)
+	}
+	for i := 0; i < n; i++ {
+		switch {
+		case a[i] < b[i]:
+			return -1
+		case a[i] > b[i]:
+			return 1
+		}
+	}
+	switch {
+	case len(a) < len(b):
+		return -1
+	case len(a) > len(b):
+		return 1
+	}
+	return 0
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/std_imports.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/std_imports.go
new file mode 100644
index 0000000..59bcdd3
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/std_imports.go
@@ -0,0 +1,49 @@
+package protoparse
+
+import (
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+	// link in packages that include the standard protos included with protoc
+	_ "github.com/golang/protobuf/protoc-gen-go/plugin"
+	_ "github.com/golang/protobuf/ptypes/any"
+	_ "github.com/golang/protobuf/ptypes/duration"
+	_ "github.com/golang/protobuf/ptypes/empty"
+	_ "github.com/golang/protobuf/ptypes/struct"
+	_ "github.com/golang/protobuf/ptypes/timestamp"
+	_ "github.com/golang/protobuf/ptypes/wrappers"
+	_ "google.golang.org/genproto/protobuf/api"
+	_ "google.golang.org/genproto/protobuf/field_mask"
+	_ "google.golang.org/genproto/protobuf/ptype"
+	_ "google.golang.org/genproto/protobuf/source_context"
+
+	"github.com/jhump/protoreflect/internal"
+)
+
+// All files that are included with protoc are also included with this package
+// so that clients do not need to explicitly supply a copy of these protos (just
+// like callers of protoc do not need to supply them).
+var standardImports map[string]*dpb.FileDescriptorProto
+
+// init populates standardImports with the descriptors of all proto files
+// that ship with protoc, loading each from the compiled-in registry.
+func init() {
+	standardFilenames := []string{
+		"google/protobuf/any.proto",
+		"google/protobuf/api.proto",
+		"google/protobuf/compiler/plugin.proto",
+		"google/protobuf/descriptor.proto",
+		"google/protobuf/duration.proto",
+		"google/protobuf/empty.proto",
+		"google/protobuf/field_mask.proto",
+		"google/protobuf/source_context.proto",
+		"google/protobuf/struct.proto",
+		"google/protobuf/timestamp.proto",
+		"google/protobuf/type.proto",
+		"google/protobuf/wrappers.proto",
+	}
+
+	standardImports = map[string]*dpb.FileDescriptorProto{}
+	for _, fn := range standardFilenames {
+		fd, err := internal.LoadFileDescriptor(fn)
+		// load errors are deliberately ignored (best effort): a file that
+		// fails to load is simply not available as a standard import
+		if err == nil {
+			standardImports[fn] = fd
+		}
+	}
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/test-source-info.txt b/vendor/github.com/jhump/protoreflect/desc/protoparse/test-source-info.txt
new file mode 100644
index 0000000..c03fd64
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/test-source-info.txt
@@ -0,0 +1,1696 @@
+---- desc_test_comments.proto ----
+
+
+:
+desc_test_comments.proto:8:1
+desc_test_comments.proto:119:2
+
+
+ > syntax:
+desc_test_comments.proto:8:1
+desc_test_comments.proto:8:19
+    Leading detached comment [0]:
+ This is the first detached comment for the syntax.
+    Leading detached comment [1]:
+
+ This is a second detached comment.
+
+    Leading detached comment [2]:
+ This is a third.
+    Leading comments:
+ Syntax comment...
+    Trailing comments:
+ Syntax trailer.
+
+
+ > package:
+desc_test_comments.proto:12:1
+desc_test_comments.proto:12:17
+    Leading comments:
+ And now the package declaration
+
+
+ > options:
+desc_test_comments.proto:15:1
+desc_test_comments.proto:15:75
+
+
+ > options > go_package:
+desc_test_comments.proto:15:1
+desc_test_comments.proto:15:75
+    Leading comments:
+ option comments FTW!!!
+
+
+ > dependency:
+desc_test_comments.proto:17:1
+desc_test_comments.proto:18:34
+
+
+ > dependency[0]:
+desc_test_comments.proto:17:1
+desc_test_comments.proto:17:38
+
+
+ > dependency[1]:
+desc_test_comments.proto:18:1
+desc_test_comments.proto:18:34
+
+
+ > message_type:
+desc_test_comments.proto:25:1
+desc_test_comments.proto:89:2
+
+
+ > message_type[0]:
+desc_test_comments.proto:25:1
+desc_test_comments.proto:89:2
+    Leading detached comment [0]:
+ Multiple white space lines (like above) cannot
+ be preserved...
+    Leading comments:
+ We need a request for our RPC service below.
+    Trailing comments:
+ And next we'll need some extensions...
+
+
+ > message_type[0] > name:
+desc_test_comments.proto:25:68
+desc_test_comments.proto:25:75
+    Leading detached comment [0]:
+ detached message name 
+    Leading comments:
+ request with a capital R 
+    Trailing comments:
+ trailer
+
+
+ > message_type[0] > options:
+desc_test_comments.proto:26:3
+desc_test_comments.proto:35:54
+
+
+ > message_type[0] > options > deprecated:
+desc_test_comments.proto:26:3
+desc_test_comments.proto:26:28
+
+
+ > message_type[0] > field:
+desc_test_comments.proto:29:2
+desc_test_comments.proto:66:3
+
+
+ > message_type[0] > field[0]:
+desc_test_comments.proto:29:2
+desc_test_comments.proto:32:92
+    Leading comments:
+ A field comment
+    Trailing comments:
+ field trailer #1...
+
+
+ > message_type[0] > field[0] > label:
+desc_test_comments.proto:29:2
+desc_test_comments.proto:29:10
+
+
+ > message_type[0] > field[0] > type:
+desc_test_comments.proto:29:11
+desc_test_comments.proto:29:16
+
+
+ > message_type[0] > field[0] > name:
+desc_test_comments.proto:29:17
+desc_test_comments.proto:29:20
+
+
+ > message_type[0] > field[0] > number:
+desc_test_comments.proto:29:63
+desc_test_comments.proto:29:64
+    Leading detached comment [0]:
+ detached tag 
+    Leading comments:
+ tag numero uno 
+    Trailing comments:
+ tag trailer
+ that spans multiple lines...
+ more than two. 
+
+
+ > message_type[0] > field[0] > options:
+desc_test_comments.proto:32:5
+desc_test_comments.proto:32:90
+
+
+ > message_type[0] > field[0] > options > packed:
+desc_test_comments.proto:32:5
+desc_test_comments.proto:32:16
+
+
+ > message_type[0] > field[0] > json_name:
+desc_test_comments.proto:32:18
+desc_test_comments.proto:32:35
+
+
+ > message_type[0] > field[0] > options > ffubar:
+desc_test_comments.proto:32:37
+desc_test_comments.proto:32:62
+
+
+ > message_type[0] > field[0] > options > ffubar[0]:
+desc_test_comments.proto:32:37
+desc_test_comments.proto:32:62
+
+
+ > message_type[0] > field[0] > options > ffubarb:
+desc_test_comments.proto:32:64
+desc_test_comments.proto:32:90
+
+
+ > message_type[0] > options > mfubar:
+desc_test_comments.proto:35:20
+desc_test_comments.proto:35:54
+    Leading comments:
+ lead mfubar 
+    Trailing comments:
+ trailing mfubar
+
+
+ > message_type[0] > field[1]:
+desc_test_comments.proto:42:22
+desc_test_comments.proto:43:63
+    Leading detached comment [0]:
+ some detached comments
+    Leading detached comment [1]:
+ some detached comments
+    Leading detached comment [2]:
+ Another field comment
+    Leading comments:
+ label comment 
+
+
+ > message_type[0] > field[1] > label:
+desc_test_comments.proto:42:22
+desc_test_comments.proto:42:30
+
+
+ > message_type[0] > field[1] > type:
+desc_test_comments.proto:42:50
+desc_test_comments.proto:42:56
+    Leading comments:
+ type comment 
+
+
+ > message_type[0] > field[1] > name:
+desc_test_comments.proto:42:76
+desc_test_comments.proto:42:80
+    Leading comments:
+ name comment 
+
+
+ > message_type[0] > field[1] > number:
+desc_test_comments.proto:42:83
+desc_test_comments.proto:42:84
+
+
+ > message_type[0] > field[1] > default_value:
+desc_test_comments.proto:43:23
+desc_test_comments.proto:43:40
+    Leading comments:
+ default lead 
+    Trailing comments:
+ default trail 
+
+
+ > message_type[0] > extension_range:
+desc_test_comments.proto:46:13
+desc_test_comments.proto:47:23
+
+
+ > message_type[0] > extension_range[0]:
+desc_test_comments.proto:46:13
+desc_test_comments.proto:46:23
+
+
+ > message_type[0] > extension_range[0] > start:
+desc_test_comments.proto:46:13
+desc_test_comments.proto:46:16
+
+
+ > message_type[0] > extension_range[0] > end:
+desc_test_comments.proto:46:20
+desc_test_comments.proto:46:23
+
+
+ > message_type[0] > extension_range[1]:
+desc_test_comments.proto:47:13
+desc_test_comments.proto:47:23
+
+
+ > message_type[0] > extension_range[1] > start:
+desc_test_comments.proto:47:13
+desc_test_comments.proto:47:16
+
+
+ > message_type[0] > extension_range[1] > end:
+desc_test_comments.proto:47:20
+desc_test_comments.proto:47:23
+
+
+ > message_type[0] > extension_range[1] > options:
+desc_test_comments.proto:47:25
+desc_test_comments.proto:47:100
+
+
+ > message_type[0] > extension_range[1] > options > exfubarb:
+desc_test_comments.proto:47:25
+desc_test_comments.proto:47:67
+
+
+ > message_type[0] > extension_range[1] > options > exfubar:
+desc_test_comments.proto:47:69
+desc_test_comments.proto:47:100
+
+
+ > message_type[0] > extension_range[1] > options > exfubar[0]:
+desc_test_comments.proto:47:69
+desc_test_comments.proto:47:100
+
+
+ > message_type[0] > reserved_range:
+desc_test_comments.proto:51:50
+desc_test_comments.proto:51:68
+
+
+ > message_type[0] > reserved_range[0]:
+desc_test_comments.proto:51:50
+desc_test_comments.proto:51:58
+
+
+ > message_type[0] > reserved_range[0] > start:
+desc_test_comments.proto:51:50
+desc_test_comments.proto:51:52
+
+
+ > message_type[0] > reserved_range[0] > end:
+desc_test_comments.proto:51:56
+desc_test_comments.proto:51:58
+
+
+ > message_type[0] > reserved_range[1]:
+desc_test_comments.proto:51:60
+desc_test_comments.proto:51:68
+
+
+ > message_type[0] > reserved_range[1] > start:
+desc_test_comments.proto:51:60
+desc_test_comments.proto:51:62
+
+
+ > message_type[0] > reserved_range[1] > end:
+desc_test_comments.proto:51:66
+desc_test_comments.proto:51:68
+
+
+ > message_type[0] > reserved_name:
+desc_test_comments.proto:52:11
+desc_test_comments.proto:52:30
+
+
+ > message_type[0] > reserved_name[0]:
+desc_test_comments.proto:52:11
+desc_test_comments.proto:52:16
+
+
+ > message_type[0] > reserved_name[1]:
+desc_test_comments.proto:52:18
+desc_test_comments.proto:52:23
+
+
+ > message_type[0] > reserved_name[2]:
+desc_test_comments.proto:52:25
+desc_test_comments.proto:52:30
+
+
+ > message_type[0] > field[2]:
+desc_test_comments.proto:55:2
+desc_test_comments.proto:66:3
+    Leading comments:
+ Group comment
+
+
+ > message_type[0] > nested_type:
+desc_test_comments.proto:55:2
+desc_test_comments.proto:66:3
+
+
+ > message_type[0] > nested_type[0]:
+desc_test_comments.proto:55:2
+desc_test_comments.proto:66:3
+
+
+ > message_type[0] > field[2] > label:
+desc_test_comments.proto:55:2
+desc_test_comments.proto:55:10
+
+
+ > message_type[0] > nested_type[0] > name:
+desc_test_comments.proto:55:34
+desc_test_comments.proto:55:40
+    Leading comments:
+ group name 
+
+
+ > message_type[0] > field[2] > number:
+desc_test_comments.proto:55:43
+desc_test_comments.proto:55:44
+
+
+ > message_type[0] > nested_type[0] > options:
+desc_test_comments.proto:56:3
+desc_test_comments.proto:61:50
+
+
+ > message_type[0] > nested_type[0] > options > mfubar:
+desc_test_comments.proto:56:3
+desc_test_comments.proto:56:38
+
+
+ > message_type[0] > nested_type[0] > field:
+desc_test_comments.proto:58:3
+desc_test_comments.proto:64:27
+
+
+ > message_type[0] > nested_type[0] > field[0]:
+desc_test_comments.proto:58:3
+desc_test_comments.proto:58:27
+
+
+ > message_type[0] > nested_type[0] > field[0] > label:
+desc_test_comments.proto:58:3
+desc_test_comments.proto:58:11
+
+
+ > message_type[0] > nested_type[0] > field[0] > type:
+desc_test_comments.proto:58:12
+desc_test_comments.proto:58:18
+
+
+ > message_type[0] > nested_type[0] > field[0] > name:
+desc_test_comments.proto:58:19
+desc_test_comments.proto:58:22
+
+
+ > message_type[0] > nested_type[0] > field[0] > number:
+desc_test_comments.proto:58:25
+desc_test_comments.proto:58:26
+
+
+ > message_type[0] > nested_type[0] > field[1]:
+desc_test_comments.proto:59:3
+desc_test_comments.proto:59:26
+
+
+ > message_type[0] > nested_type[0] > field[1] > label:
+desc_test_comments.proto:59:3
+desc_test_comments.proto:59:11
+
+
+ > message_type[0] > nested_type[0] > field[1] > type:
+desc_test_comments.proto:59:12
+desc_test_comments.proto:59:17
+
+
+ > message_type[0] > nested_type[0] > field[1] > name:
+desc_test_comments.proto:59:18
+desc_test_comments.proto:59:21
+
+
+ > message_type[0] > nested_type[0] > field[1] > number:
+desc_test_comments.proto:59:24
+desc_test_comments.proto:59:25
+
+
+ > message_type[0] > nested_type[0] > options > no_standard_descriptor_accessor:
+desc_test_comments.proto:61:3
+desc_test_comments.proto:61:50
+
+
+ > message_type[0] > nested_type[0] > field[2]:
+desc_test_comments.proto:64:3
+desc_test_comments.proto:64:27
+    Leading comments:
+ Leading comment...
+    Trailing comments:
+ Trailing comment...
+
+
+ > message_type[0] > nested_type[0] > field[2] > label:
+desc_test_comments.proto:64:3
+desc_test_comments.proto:64:11
+
+
+ > message_type[0] > nested_type[0] > field[2] > type:
+desc_test_comments.proto:64:12
+desc_test_comments.proto:64:18
+
+
+ > message_type[0] > nested_type[0] > field[2] > name:
+desc_test_comments.proto:64:19
+desc_test_comments.proto:64:22
+
+
+ > message_type[0] > nested_type[0] > field[2] > number:
+desc_test_comments.proto:64:25
+desc_test_comments.proto:64:26
+
+
+ > message_type[0] > enum_type:
+desc_test_comments.proto:68:2
+desc_test_comments.proto:88:3
+
+
+ > message_type[0] > enum_type[0]:
+desc_test_comments.proto:68:2
+desc_test_comments.proto:88:3
+
+
+ > message_type[0] > enum_type[0] > name:
+desc_test_comments.proto:68:7
+desc_test_comments.proto:68:22
+    Trailing comments:
+ "super"!
+
+
+ > message_type[0] > enum_type[0] > value:
+desc_test_comments.proto:72:3
+desc_test_comments.proto:85:17
+
+
+ > message_type[0] > enum_type[0] > value[0]:
+desc_test_comments.proto:72:3
+desc_test_comments.proto:72:72
+
+
+ > message_type[0] > enum_type[0] > value[0] > name:
+desc_test_comments.proto:72:3
+desc_test_comments.proto:72:8
+
+
+ > message_type[0] > enum_type[0] > value[0] > number:
+desc_test_comments.proto:72:11
+desc_test_comments.proto:72:12
+
+
+ > message_type[0] > enum_type[0] > value[0] > options:
+desc_test_comments.proto:72:14
+desc_test_comments.proto:72:70
+
+
+ > message_type[0] > enum_type[0] > value[0] > options > evfubars:
+desc_test_comments.proto:72:14
+desc_test_comments.proto:72:42
+
+
+ > message_type[0] > enum_type[0] > value[0] > options > evfubar:
+desc_test_comments.proto:72:44
+desc_test_comments.proto:72:70
+
+
+ > message_type[0] > enum_type[0] > value[1]:
+desc_test_comments.proto:73:3
+desc_test_comments.proto:73:86
+
+
+ > message_type[0] > enum_type[0] > value[1] > name:
+desc_test_comments.proto:73:3
+desc_test_comments.proto:73:8
+
+
+ > message_type[0] > enum_type[0] > value[1] > number:
+desc_test_comments.proto:73:11
+desc_test_comments.proto:73:12
+
+
+ > message_type[0] > enum_type[0] > value[1] > options:
+desc_test_comments.proto:73:15
+desc_test_comments.proto:73:84
+
+
+ > message_type[0] > enum_type[0] > value[1] > options > evfubaruf:
+desc_test_comments.proto:73:15
+desc_test_comments.proto:73:43
+
+
+ > message_type[0] > enum_type[0] > value[1] > options > evfubaru:
+desc_test_comments.proto:73:59
+desc_test_comments.proto:73:84
+
+
+ > message_type[0] > enum_type[0] > value[2]:
+desc_test_comments.proto:74:3
+desc_test_comments.proto:74:13
+
+
+ > message_type[0] > enum_type[0] > value[2] > name:
+desc_test_comments.proto:74:3
+desc_test_comments.proto:74:8
+
+
+ > message_type[0] > enum_type[0] > value[2] > number:
+desc_test_comments.proto:74:11
+desc_test_comments.proto:74:12
+
+
+ > message_type[0] > enum_type[0] > value[3]:
+desc_test_comments.proto:75:3
+desc_test_comments.proto:75:14
+
+
+ > message_type[0] > enum_type[0] > value[3] > name:
+desc_test_comments.proto:75:3
+desc_test_comments.proto:75:9
+
+
+ > message_type[0] > enum_type[0] > value[3] > number:
+desc_test_comments.proto:75:12
+desc_test_comments.proto:75:13
+
+
+ > message_type[0] > enum_type[0] > options:
+desc_test_comments.proto:77:3
+desc_test_comments.proto:87:36
+
+
+ > message_type[0] > enum_type[0] > options > efubars:
+desc_test_comments.proto:77:3
+desc_test_comments.proto:77:38
+
+
+ > message_type[0] > enum_type[0] > value[4]:
+desc_test_comments.proto:79:3
+desc_test_comments.proto:79:13
+
+
+ > message_type[0] > enum_type[0] > value[4] > name:
+desc_test_comments.proto:79:3
+desc_test_comments.proto:79:8
+
+
+ > message_type[0] > enum_type[0] > value[4] > number:
+desc_test_comments.proto:79:11
+desc_test_comments.proto:79:12
+
+
+ > message_type[0] > enum_type[0] > value[5]:
+desc_test_comments.proto:80:3
+desc_test_comments.proto:80:15
+
+
+ > message_type[0] > enum_type[0] > value[5] > name:
+desc_test_comments.proto:80:3
+desc_test_comments.proto:80:10
+
+
+ > message_type[0] > enum_type[0] > value[5] > number:
+desc_test_comments.proto:80:13
+desc_test_comments.proto:80:14
+
+
+ > message_type[0] > enum_type[0] > value[6]:
+desc_test_comments.proto:81:3
+desc_test_comments.proto:81:46
+
+
+ > message_type[0] > enum_type[0] > value[6] > name:
+desc_test_comments.proto:81:3
+desc_test_comments.proto:81:10
+
+
+ > message_type[0] > enum_type[0] > value[6] > number:
+desc_test_comments.proto:81:13
+desc_test_comments.proto:81:14
+
+
+ > message_type[0] > enum_type[0] > value[6] > options:
+desc_test_comments.proto:81:16
+desc_test_comments.proto:81:44
+
+
+ > message_type[0] > enum_type[0] > value[6] > options > evfubarsf:
+desc_test_comments.proto:81:16
+desc_test_comments.proto:81:44
+
+
+ > message_type[0] > enum_type[0] > value[7]:
+desc_test_comments.proto:82:3
+desc_test_comments.proto:82:14
+
+
+ > message_type[0] > enum_type[0] > value[7] > name:
+desc_test_comments.proto:82:3
+desc_test_comments.proto:82:9
+
+
+ > message_type[0] > enum_type[0] > value[7] > number:
+desc_test_comments.proto:82:12
+desc_test_comments.proto:82:13
+
+
+ > message_type[0] > enum_type[0] > value[8]:
+desc_test_comments.proto:83:3
+desc_test_comments.proto:83:17
+
+
+ > message_type[0] > enum_type[0] > value[8] > name:
+desc_test_comments.proto:83:3
+desc_test_comments.proto:83:12
+
+
+ > message_type[0] > enum_type[0] > value[8] > number:
+desc_test_comments.proto:83:15
+desc_test_comments.proto:83:16
+
+
+ > message_type[0] > enum_type[0] > value[9]:
+desc_test_comments.proto:84:3
+desc_test_comments.proto:84:13
+
+
+ > message_type[0] > enum_type[0] > value[9] > name:
+desc_test_comments.proto:84:3
+desc_test_comments.proto:84:8
+
+
+ > message_type[0] > enum_type[0] > value[9] > number:
+desc_test_comments.proto:84:11
+desc_test_comments.proto:84:12
+
+
+ > message_type[0] > enum_type[0] > value[10]:
+desc_test_comments.proto:85:3
+desc_test_comments.proto:85:17
+
+
+ > message_type[0] > enum_type[0] > value[10] > name:
+desc_test_comments.proto:85:3
+desc_test_comments.proto:85:9
+
+
+ > message_type[0] > enum_type[0] > value[10] > number:
+desc_test_comments.proto:85:12
+desc_test_comments.proto:85:16
+
+
+ > message_type[0] > enum_type[0] > options > efubar:
+desc_test_comments.proto:87:3
+desc_test_comments.proto:87:36
+
+
+ > extension[0] > extendee:
+desc_test_comments.proto:94:1
+desc_test_comments.proto:94:8
+    Leading comments:
+ extendee comment
+
+
+ > extension[1] > extendee:
+desc_test_comments.proto:94:1
+desc_test_comments.proto:94:8
+
+
+ > extension:
+desc_test_comments.proto:96:2
+desc_test_comments.proto:98:30
+
+
+ > extension[0]:
+desc_test_comments.proto:96:2
+desc_test_comments.proto:96:30
+    Leading comments:
+ comment for guid1
+
+
+ > extension[0] > label:
+desc_test_comments.proto:96:2
+desc_test_comments.proto:96:10
+
+
+ > extension[0] > type:
+desc_test_comments.proto:96:11
+desc_test_comments.proto:96:17
+
+
+ > extension[0] > name:
+desc_test_comments.proto:96:18
+desc_test_comments.proto:96:23
+
+
+ > extension[0] > number:
+desc_test_comments.proto:96:26
+desc_test_comments.proto:96:29
+
+
+ > extension[1]:
+desc_test_comments.proto:98:2
+desc_test_comments.proto:98:30
+    Leading comments:
+ ... and a comment for guid2
+
+
+ > extension[1] > label:
+desc_test_comments.proto:98:2
+desc_test_comments.proto:98:10
+
+
+ > extension[1] > type:
+desc_test_comments.proto:98:11
+desc_test_comments.proto:98:17
+
+
+ > extension[1] > name:
+desc_test_comments.proto:98:18
+desc_test_comments.proto:98:23
+
+
+ > extension[1] > number:
+desc_test_comments.proto:98:26
+desc_test_comments.proto:98:29
+
+
+ > service:
+desc_test_comments.proto:103:1
+desc_test_comments.proto:119:2
+
+
+ > service[0]:
+desc_test_comments.proto:103:1
+desc_test_comments.proto:119:2
+    Leading comments:
+ Service comment
+    Trailing comments:
+ service trailer
+
+
+ > service[0] > name:
+desc_test_comments.proto:103:28
+desc_test_comments.proto:103:38
+    Leading comments:
+ service name 
+
+
+ > service[0] > options:
+desc_test_comments.proto:104:2
+desc_test_comments.proto:108:38
+
+
+ > service[0] > options > sfubar:
+desc_test_comments.proto:104:2
+desc_test_comments.proto:105:40
+
+
+ > service[0] > options > sfubar > id:
+desc_test_comments.proto:104:2
+desc_test_comments.proto:104:36
+
+
+ > service[0] > options > sfubar > name:
+desc_test_comments.proto:105:2
+desc_test_comments.proto:105:40
+
+
+ > service[0] > options > deprecated:
+desc_test_comments.proto:106:2
+desc_test_comments.proto:106:28
+
+
+ > service[0] > options > sfubare:
+desc_test_comments.proto:108:2
+desc_test_comments.proto:108:38
+
+
+ > service[0] > method:
+desc_test_comments.proto:111:2
+desc_test_comments.proto:118:3
+
+
+ > service[0] > method[0]:
+desc_test_comments.proto:111:2
+desc_test_comments.proto:112:70
+    Leading comments:
+ Method comment
+
+
+ > service[0] > method[0] > name:
+desc_test_comments.proto:111:21
+desc_test_comments.proto:111:33
+    Leading comments:
+ rpc name 
+    Trailing comments:
+ comment A 
+
+
+ > service[0] > method[0] > client_streaming:
+desc_test_comments.proto:111:66
+desc_test_comments.proto:111:72
+    Leading comments:
+ comment B 
+
+
+ > service[0] > method[0] > input_type:
+desc_test_comments.proto:111:89
+desc_test_comments.proto:111:96
+    Leading comments:
+ comment C 
+
+
+ > service[0] > method[0] > output_type:
+desc_test_comments.proto:112:43
+desc_test_comments.proto:112:50
+    Leading comments:
+comment E 
+
+
+ > service[0] > method[1]:
+desc_test_comments.proto:114:2
+desc_test_comments.proto:118:3
+
+
+ > service[0] > method[1] > name:
+desc_test_comments.proto:114:6
+desc_test_comments.proto:114:14
+
+
+ > service[0] > method[1] > input_type:
+desc_test_comments.proto:114:16
+desc_test_comments.proto:114:23
+
+
+ > service[0] > method[1] > output_type:
+desc_test_comments.proto:114:34
+desc_test_comments.proto:114:55
+
+
+ > service[0] > method[1] > options:
+desc_test_comments.proto:115:3
+desc_test_comments.proto:117:42
+
+
+ > service[0] > method[1] > options > deprecated:
+desc_test_comments.proto:115:3
+desc_test_comments.proto:115:28
+
+
+ > service[0] > method[1] > options > mtfubar:
+desc_test_comments.proto:116:3
+desc_test_comments.proto:116:39
+
+
+ > service[0] > method[1] > options > mtfubar[0]:
+desc_test_comments.proto:116:3
+desc_test_comments.proto:116:39
+
+
+ > service[0] > method[1] > options > mtfubard:
+desc_test_comments.proto:117:3
+desc_test_comments.proto:117:42
+---- desc_test_options.proto ----
+
+
+:
+desc_test_options.proto:1:1
+desc_test_options.proto:62:34
+
+
+ > syntax:
+desc_test_options.proto:1:1
+desc_test_options.proto:1:19
+
+
+ > options:
+desc_test_options.proto:3:1
+desc_test_options.proto:3:73
+
+
+ > options > go_package:
+desc_test_options.proto:3:1
+desc_test_options.proto:3:73
+
+
+ > package:
+desc_test_options.proto:5:1
+desc_test_options.proto:5:20
+
+
+ > dependency:
+desc_test_options.proto:7:1
+desc_test_options.proto:7:43
+
+
+ > dependency[0]:
+desc_test_options.proto:7:1
+desc_test_options.proto:7:43
+
+
+ > extension[0] > extendee:
+desc_test_options.proto:9:8
+desc_test_options.proto:9:38
+
+
+ > extension:
+desc_test_options.proto:10:2
+desc_test_options.proto:62:34
+
+
+ > extension[0]:
+desc_test_options.proto:10:2
+desc_test_options.proto:10:31
+
+
+ > extension[0] > label:
+desc_test_options.proto:10:2
+desc_test_options.proto:10:10
+
+
+ > extension[0] > type:
+desc_test_options.proto:10:11
+desc_test_options.proto:10:15
+
+
+ > extension[0] > name:
+desc_test_options.proto:10:16
+desc_test_options.proto:10:22
+
+
+ > extension[0] > number:
+desc_test_options.proto:10:25
+desc_test_options.proto:10:30
+
+
+ > extension[1] > extendee:
+desc_test_options.proto:13:8
+desc_test_options.proto:13:36
+
+
+ > extension[2] > extendee:
+desc_test_options.proto:13:8
+desc_test_options.proto:13:36
+
+
+ > extension[1]:
+desc_test_options.proto:14:2
+desc_test_options.proto:14:33
+
+
+ > extension[1] > label:
+desc_test_options.proto:14:2
+desc_test_options.proto:14:10
+
+
+ > extension[1] > type:
+desc_test_options.proto:14:11
+desc_test_options.proto:14:17
+
+
+ > extension[1] > name:
+desc_test_options.proto:14:18
+desc_test_options.proto:14:24
+
+
+ > extension[1] > number:
+desc_test_options.proto:14:27
+desc_test_options.proto:14:32
+
+
+ > extension[2]:
+desc_test_options.proto:15:2
+desc_test_options.proto:15:33
+
+
+ > extension[2] > label:
+desc_test_options.proto:15:2
+desc_test_options.proto:15:10
+
+
+ > extension[2] > type:
+desc_test_options.proto:15:11
+desc_test_options.proto:15:16
+
+
+ > extension[2] > name:
+desc_test_options.proto:15:17
+desc_test_options.proto:15:24
+
+
+ > extension[2] > number:
+desc_test_options.proto:15:27
+desc_test_options.proto:15:32
+
+
+ > extension[3] > extendee:
+desc_test_options.proto:18:8
+desc_test_options.proto:18:35
+
+
+ > extension[4] > extendee:
+desc_test_options.proto:18:8
+desc_test_options.proto:18:35
+
+
+ > extension[5] > extendee:
+desc_test_options.proto:18:8
+desc_test_options.proto:18:35
+
+
+ > extension[6] > extendee:
+desc_test_options.proto:18:8
+desc_test_options.proto:18:35
+
+
+ > extension[7] > extendee:
+desc_test_options.proto:18:8
+desc_test_options.proto:18:35
+
+
+ > extension[3]:
+desc_test_options.proto:19:2
+desc_test_options.proto:19:32
+
+
+ > extension[3] > label:
+desc_test_options.proto:19:2
+desc_test_options.proto:19:10
+
+
+ > extension[3] > type:
+desc_test_options.proto:19:11
+desc_test_options.proto:19:16
+
+
+ > extension[3] > name:
+desc_test_options.proto:19:17
+desc_test_options.proto:19:23
+
+
+ > extension[3] > number:
+desc_test_options.proto:19:26
+desc_test_options.proto:19:31
+
+
+ > extension[4]:
+desc_test_options.proto:20:2
+desc_test_options.proto:20:34
+
+
+ > extension[4] > label:
+desc_test_options.proto:20:2
+desc_test_options.proto:20:10
+
+
+ > extension[4] > type:
+desc_test_options.proto:20:11
+desc_test_options.proto:20:17
+
+
+ > extension[4] > name:
+desc_test_options.proto:20:18
+desc_test_options.proto:20:25
+
+
+ > extension[4] > number:
+desc_test_options.proto:20:28
+desc_test_options.proto:20:33
+
+
+ > extension[5]:
+desc_test_options.proto:21:2
+desc_test_options.proto:21:37
+
+
+ > extension[5] > label:
+desc_test_options.proto:21:2
+desc_test_options.proto:21:10
+
+
+ > extension[5] > type:
+desc_test_options.proto:21:11
+desc_test_options.proto:21:19
+
+
+ > extension[5] > name:
+desc_test_options.proto:21:20
+desc_test_options.proto:21:28
+
+
+ > extension[5] > number:
+desc_test_options.proto:21:31
+desc_test_options.proto:21:36
+
+
+ > extension[6]:
+desc_test_options.proto:22:2
+desc_test_options.proto:22:34
+
+
+ > extension[6] > label:
+desc_test_options.proto:22:2
+desc_test_options.proto:22:10
+
+
+ > extension[6] > type:
+desc_test_options.proto:22:11
+desc_test_options.proto:22:17
+
+
+ > extension[6] > name:
+desc_test_options.proto:22:18
+desc_test_options.proto:22:25
+
+
+ > extension[6] > number:
+desc_test_options.proto:22:28
+desc_test_options.proto:22:33
+
+
+ > extension[7]:
+desc_test_options.proto:23:2
+desc_test_options.proto:23:36
+
+
+ > extension[7] > label:
+desc_test_options.proto:23:2
+desc_test_options.proto:23:10
+
+
+ > extension[7] > type:
+desc_test_options.proto:23:11
+desc_test_options.proto:23:18
+
+
+ > extension[7] > name:
+desc_test_options.proto:23:19
+desc_test_options.proto:23:27
+
+
+ > extension[7] > number:
+desc_test_options.proto:23:30
+desc_test_options.proto:23:35
+
+
+ > extension[8] > extendee:
+desc_test_options.proto:26:8
+desc_test_options.proto:26:40
+
+
+ > extension[9] > extendee:
+desc_test_options.proto:26:8
+desc_test_options.proto:26:40
+
+
+ > extension[10] > extendee:
+desc_test_options.proto:26:8
+desc_test_options.proto:26:40
+
+
+ > extension[11] > extendee:
+desc_test_options.proto:26:8
+desc_test_options.proto:26:40
+
+
+ > extension[12] > extendee:
+desc_test_options.proto:26:8
+desc_test_options.proto:26:40
+
+
+ > extension[8]:
+desc_test_options.proto:27:2
+desc_test_options.proto:27:33
+
+
+ > extension[8] > label:
+desc_test_options.proto:27:2
+desc_test_options.proto:27:10
+
+
+ > extension[8] > type:
+desc_test_options.proto:27:11
+desc_test_options.proto:27:16
+
+
+ > extension[8] > name:
+desc_test_options.proto:27:17
+desc_test_options.proto:27:24
+
+
+ > extension[8] > number:
+desc_test_options.proto:27:27
+desc_test_options.proto:27:32
+
+
+ > extension[9]:
+desc_test_options.proto:28:2
+desc_test_options.proto:28:35
+
+
+ > extension[9] > label:
+desc_test_options.proto:28:2
+desc_test_options.proto:28:10
+
+
+ > extension[9] > type:
+desc_test_options.proto:28:11
+desc_test_options.proto:28:17
+
+
+ > extension[9] > name:
+desc_test_options.proto:28:18
+desc_test_options.proto:28:26
+
+
+ > extension[9] > number:
+desc_test_options.proto:28:29
+desc_test_options.proto:28:34
+
+
+ > extension[10]:
+desc_test_options.proto:29:2
+desc_test_options.proto:29:38
+
+
+ > extension[10] > label:
+desc_test_options.proto:29:2
+desc_test_options.proto:29:10
+
+
+ > extension[10] > type:
+desc_test_options.proto:29:11
+desc_test_options.proto:29:19
+
+
+ > extension[10] > name:
+desc_test_options.proto:29:20
+desc_test_options.proto:29:29
+
+
+ > extension[10] > number:
+desc_test_options.proto:29:32
+desc_test_options.proto:29:37
+
+
+ > extension[11]:
+desc_test_options.proto:30:2
+desc_test_options.proto:30:35
+
+
+ > extension[11] > label:
+desc_test_options.proto:30:2
+desc_test_options.proto:30:10
+
+
+ > extension[11] > type:
+desc_test_options.proto:30:11
+desc_test_options.proto:30:17
+
+
+ > extension[11] > name:
+desc_test_options.proto:30:18
+desc_test_options.proto:30:26
+
+
+ > extension[11] > number:
+desc_test_options.proto:30:29
+desc_test_options.proto:30:34
+
+
+ > extension[12]:
+desc_test_options.proto:31:2
+desc_test_options.proto:31:37
+
+
+ > extension[12] > label:
+desc_test_options.proto:31:2
+desc_test_options.proto:31:10
+
+
+ > extension[12] > type:
+desc_test_options.proto:31:11
+desc_test_options.proto:31:18
+
+
+ > extension[12] > name:
+desc_test_options.proto:31:19
+desc_test_options.proto:31:28
+
+
+ > extension[12] > number:
+desc_test_options.proto:31:31
+desc_test_options.proto:31:36
+
+
+ > extension[13] > extendee:
+desc_test_options.proto:34:8
+desc_test_options.proto:34:38
+
+
+ > extension[14] > extendee:
+desc_test_options.proto:34:8
+desc_test_options.proto:34:38
+
+
+ > extension[13]:
+desc_test_options.proto:35:2
+desc_test_options.proto:35:46
+
+
+ > extension[13] > label:
+desc_test_options.proto:35:2
+desc_test_options.proto:35:10
+
+
+ > extension[13] > type:
+desc_test_options.proto:35:11
+desc_test_options.proto:35:30
+
+
+ > extension[13] > name:
+desc_test_options.proto:35:31
+desc_test_options.proto:35:37
+
+
+ > extension[13] > number:
+desc_test_options.proto:35:40
+desc_test_options.proto:35:45
+
+
+ > extension[14]:
+desc_test_options.proto:36:2
+desc_test_options.proto:36:44
+
+
+ > extension[14] > label:
+desc_test_options.proto:36:2
+desc_test_options.proto:36:10
+
+
+ > extension[14] > type:
+desc_test_options.proto:36:11
+desc_test_options.proto:36:27
+
+
+ > extension[14] > name:
+desc_test_options.proto:36:28
+desc_test_options.proto:36:35
+
+
+ > extension[14] > number:
+desc_test_options.proto:36:38
+desc_test_options.proto:36:43
+
+
+ > extension[15] > extendee:
+desc_test_options.proto:39:8
+desc_test_options.proto:39:37
+
+
+ > extension[16] > extendee:
+desc_test_options.proto:39:8
+desc_test_options.proto:39:37
+
+
+ > extension[15]:
+desc_test_options.proto:40:2
+desc_test_options.proto:40:33
+
+
+ > extension[15] > label:
+desc_test_options.proto:40:2
+desc_test_options.proto:40:10
+
+
+ > extension[15] > type:
+desc_test_options.proto:40:11
+desc_test_options.proto:40:16
+
+
+ > extension[15] > name:
+desc_test_options.proto:40:17
+desc_test_options.proto:40:24
+
+
+ > extension[15] > number:
+desc_test_options.proto:40:27
+desc_test_options.proto:40:32
+
+
+ > extension[16]:
+desc_test_options.proto:41:2
+desc_test_options.proto:41:35
+
+
+ > extension[16] > label:
+desc_test_options.proto:41:2
+desc_test_options.proto:41:10
+
+
+ > extension[16] > type:
+desc_test_options.proto:41:11
+desc_test_options.proto:41:17
+
+
+ > extension[16] > name:
+desc_test_options.proto:41:18
+desc_test_options.proto:41:26
+
+
+ > extension[16] > number:
+desc_test_options.proto:41:29
+desc_test_options.proto:41:34
+
+
+ > message_type:
+desc_test_options.proto:45:1
+desc_test_options.proto:48:2
+
+
+ > message_type[0]:
+desc_test_options.proto:45:1
+desc_test_options.proto:48:2
+    Leading comments:
+ Test message used by custom options
+
+
+ > message_type[0] > name:
+desc_test_options.proto:45:9
+desc_test_options.proto:45:28
+
+
+ > message_type[0] > field:
+desc_test_options.proto:46:2
+desc_test_options.proto:47:27
+
+
+ > message_type[0] > field[0]:
+desc_test_options.proto:46:2
+desc_test_options.proto:46:25
+
+
+ > message_type[0] > field[0] > label:
+desc_test_options.proto:46:2
+desc_test_options.proto:46:10
+
+
+ > message_type[0] > field[0] > type:
+desc_test_options.proto:46:11
+desc_test_options.proto:46:17
+
+
+ > message_type[0] > field[0] > name:
+desc_test_options.proto:46:18
+desc_test_options.proto:46:20
+
+
+ > message_type[0] > field[0] > number:
+desc_test_options.proto:46:23
+desc_test_options.proto:46:24
+
+
+ > message_type[0] > field[1]:
+desc_test_options.proto:47:2
+desc_test_options.proto:47:27
+
+
+ > message_type[0] > field[1] > label:
+desc_test_options.proto:47:2
+desc_test_options.proto:47:10
+
+
+ > message_type[0] > field[1] > type:
+desc_test_options.proto:47:11
+desc_test_options.proto:47:17
+
+
+ > message_type[0] > field[1] > name:
+desc_test_options.proto:47:18
+desc_test_options.proto:47:22
+
+
+ > message_type[0] > field[1] > number:
+desc_test_options.proto:47:25
+desc_test_options.proto:47:26
+
+
+ > enum_type:
+desc_test_options.proto:51:1
+desc_test_options.proto:53:2
+
+
+ > enum_type[0]:
+desc_test_options.proto:51:1
+desc_test_options.proto:53:2
+    Leading comments:
+ Test enum used by custom options
+
+
+ > enum_type[0] > name:
+desc_test_options.proto:51:6
+desc_test_options.proto:51:22
+
+
+ > enum_type[0] > value:
+desc_test_options.proto:52:2
+desc_test_options.proto:52:12
+
+
+ > enum_type[0] > value[0]:
+desc_test_options.proto:52:2
+desc_test_options.proto:52:12
+
+
+ > enum_type[0] > value[0] > name:
+desc_test_options.proto:52:2
+desc_test_options.proto:52:7
+
+
+ > enum_type[0] > value[0] > number:
+desc_test_options.proto:52:10
+desc_test_options.proto:52:11
+
+
+ > extension[17] > extendee:
+desc_test_options.proto:55:8
+desc_test_options.proto:55:45
+
+
+ > extension[18] > extendee:
+desc_test_options.proto:55:8
+desc_test_options.proto:55:45
+
+
+ > extension[17]:
+desc_test_options.proto:56:2
+desc_test_options.proto:56:34
+
+
+ > extension[17] > label:
+desc_test_options.proto:56:2
+desc_test_options.proto:56:10
+
+
+ > extension[17] > type:
+desc_test_options.proto:56:11
+desc_test_options.proto:56:17
+
+
+ > extension[17] > name:
+desc_test_options.proto:56:18
+desc_test_options.proto:56:25
+
+
+ > extension[17] > number:
+desc_test_options.proto:56:28
+desc_test_options.proto:56:33
+
+
+ > extension[18]:
+desc_test_options.proto:57:2
+desc_test_options.proto:57:34
+
+
+ > extension[18] > label:
+desc_test_options.proto:57:2
+desc_test_options.proto:57:10
+
+
+ > extension[18] > type:
+desc_test_options.proto:57:11
+desc_test_options.proto:57:16
+
+
+ > extension[18] > name:
+desc_test_options.proto:57:17
+desc_test_options.proto:57:25
+
+
+ > extension[18] > number:
+desc_test_options.proto:57:28
+desc_test_options.proto:57:33
+
+
+ > extension[19] > extendee:
+desc_test_options.proto:60:8
+desc_test_options.proto:60:36
+
+
+ > extension[20] > extendee:
+desc_test_options.proto:60:8
+desc_test_options.proto:60:36
+
+
+ > extension[19]:
+desc_test_options.proto:61:2
+desc_test_options.proto:61:34
+
+
+ > extension[19] > label:
+desc_test_options.proto:61:2
+desc_test_options.proto:61:10
+
+
+ > extension[19] > type:
+desc_test_options.proto:61:11
+desc_test_options.proto:61:17
+
+
+ > extension[19] > name:
+desc_test_options.proto:61:18
+desc_test_options.proto:61:25
+
+
+ > extension[19] > number:
+desc_test_options.proto:61:28
+desc_test_options.proto:61:33
+
+
+ > extension[20]:
+desc_test_options.proto:62:2
+desc_test_options.proto:62:34
+
+
+ > extension[20] > label:
+desc_test_options.proto:62:2
+desc_test_options.proto:62:10
+
+
+ > extension[20] > type:
+desc_test_options.proto:62:11
+desc_test_options.proto:62:16
+
+
+ > extension[20] > name:
+desc_test_options.proto:62:17
+desc_test_options.proto:62:25
+
+
+ > extension[20] > number:
+desc_test_options.proto:62:28
+desc_test_options.proto:62:33
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoprint/doc.go b/vendor/github.com/jhump/protoreflect/desc/protoprint/doc.go
new file mode 100644
index 0000000..b56e8ac
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoprint/doc.go
@@ -0,0 +1,7 @@
+// Package protoprint provides a mechanism to generate protobuf source code
+// from descriptors.
+//
+// This can be useful to turn file descriptor sets (produced by protoc) back
+// into proto IDL code. Combined with the protoreflect/builder package, it can
+// also be used to perform code generation of proto source code.
+package protoprint
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoprint/print.go b/vendor/github.com/jhump/protoreflect/desc/protoprint/print.go
new file mode 100644
index 0000000..d8f7f22
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoprint/print.go
@@ -0,0 +1,2288 @@
+package protoprint
+
+import (
+	"bytes"
+	"fmt"
+	"io"
+	"math"
+	"os"
+	"path/filepath"
+	"reflect"
+	"sort"
+	"strings"
+
+	"github.com/golang/protobuf/proto"
+	"github.com/golang/protobuf/protoc-gen-go/descriptor"
+
+	"github.com/jhump/protoreflect/desc"
+	"github.com/jhump/protoreflect/desc/internal"
+	"github.com/jhump/protoreflect/dynamic"
+)
+
+// Printer knows how to format file descriptors as proto source code. Its fields
+// provide some control over how the resulting source file is constructed and
+// formatted.
+type Printer struct {
+	// If true, comments are rendered using "/*" style comments. Otherwise, they
+	// are printed using "//" style line comments.
+	PreferMultiLineStyleComments bool
+
+	// If true, elements are sorted into a canonical order.
+	//
+	// The canonical order for elements in a file follows:
+	//  1. Syntax
+	//  2. Package
+	//  3. Imports (sorted lexically)
+	//  4. Options (sorted by name, standard options before custom options)
+	//  5. Messages (sorted by name)
+	//  6. Enums (sorted by name)
+	//  7. Services (sorted by name)
+	//  8. Extensions (grouped by extendee, sorted by extendee+tag)
+	//
+	// The canonical order of elements in a message follows:
+	//  1. Options (sorted by name, standard options before custom options)
+	//  2. Fields and One-Ofs (sorted by tag; one-ofs interleaved based on the
+	//     minimum tag therein)
+	//  3. Nested Messages (sorted by name)
+	//  4. Nested Enums (sorted by name)
+	//  5. Extension ranges (sorted by starting tag number)
+	//  6. Nested Extensions (grouped by extendee, sorted by extendee+tag)
+	//  7. Reserved ranges (sorted by starting tag number)
+	//  8. Reserved names (sorted lexically)
+	//
+	// Methods are sorted within a service by name and appear after any service
+	// options (which are sorted by name, standard options before custom ones).
+	// Enum values are sorted within an enum, first by numeric value then by
+	// name, and also appear after any enum options.
+	//
+	// Options for fields, enum values, and extension ranges are sorted by name,
+	// standard options before custom ones.
+	SortElements bool
+
+	// The indentation used. Any characters other than spaces or tabs will be
+	// replaced with spaces. If unset/empty, two spaces will be used.
+	Indent string
+
+	// If true, detached comments (between elements) will be ignored.
+	//
+	// Deprecated: Use OmitComments bitmask instead.
+	OmitDetachedComments bool
+
+	// A bitmask of comment types to omit. If unset, all comments will be
+	// included. Use CommentsAll to not print any comments.
+	OmitComments CommentType
+
+	// If true, trailing comments that typically appear on the same line as an
+	// element (option, field, enum value, method) will be printed on a separate
+	// line instead.
+	//
+	// So, with this set, you'll get output like so:
+	//
+	//    // leading comment for field
+	//    repeated string names = 1;
+	//    // trailing comment
+	//
+	// If left false, the printer will try to emit trailing comments on the same
+	// line instead:
+	//
+	//    // leading comment for field
+	//    repeated string names = 1; // trailing comment
+	//
+	// If the trailing comment has more than one line, it will automatically be
+	// forced to the next line. Also, elements that end with "}" instead of ";"
+	// will have trailing comments rendered on the subsequent line.
+	TrailingCommentsOnSeparateLine bool
+
+	// If true, the printed output will eschew any blank lines, which otherwise
+	// appear between descriptor elements and comment blocks. Note that if
+	// detached comments are being printed, this will cause them to be merged
+	// into the subsequent leading comments. Similarly, any element trailing
+	// comments will be merged into the subsequent leading comments.
+	Compact bool
+
+	// If true, all references to messages, extensions, and enums (such as in
+	// options, field types, and method request and response types) will be
+	// fully-qualified. When left unset, the referenced elements will contain
+	// only as much qualifier as is required.
+	//
+	// For example, if a message is in the same package as the reference, the
+	// simple name can be used. If a message shares some context with the
+	// reference, only the unshared context needs to be included. For example:
+	//
+	//  message Foo {
+	//    message Bar {
+	//      enum Baz {
+	//        ZERO = 0;
+	//        ONE = 1;
+	//      }
+	//    }
+	//
+	//    // This field shares some context as the enum it references: they are
+	//    // both inside of the namespace Foo:
+	//    //    field is "Foo.my_baz"
+	//    //     enum is "Foo.Bar.Baz"
+	//    // So we only need to qualify the reference with the context that they
+	//    // do NOT have in common:
+	//    Bar.Baz my_baz = 1;
+	//  }
+	//
+	// When printing fully-qualified names, they will be preceded by a dot, to
+	// avoid any ambiguity that they might be relative vs. fully-qualified.
+	ForceFullyQualifiedNames bool
+}
+
+// CommentType is a kind of comments in a proto source file. This can be used
+// as a bitmask.
+type CommentType int
+
+const (
+	// CommentsDetached refers to comments that are not "attached" to any
+	// source element. They are attributed to the subsequent element in the
+	// file as "detached" comments.
+	CommentsDetached CommentType = 1 << iota
+	// CommentsTrailing refers to a comment block immediately following an
+	// element in the source file. If another element immediately follows
+	// the trailing comment, it is instead considered a leading comment for
+	// that subsequent element.
+	CommentsTrailing
+	// CommentsLeading refers to a comment block immediately preceding an
+	// element in the source file. For high-level elements (those that have
+	// their own descriptor), these are used as doc comments for that element.
+	CommentsLeading
+	// CommentsTokens refers to any comments (leading, trailing, or detached)
+	// on low-level elements in the file. "High-level" elements have their own
+	// descriptors, e.g. messages, enums, fields, services, and methods. But
+	// comments can appear anywhere (such as around identifiers and keywords,
+	// sprinkled inside the declarations of a high-level element). This class
+	// of comments are for those extra comments sprinkled into the file.
+	CommentsTokens
+
+	// CommentsNonDoc refers to comments that are *not* doc comments. This is a
+	// bitwise union of everything other than CommentsLeading. If you configure
+	// a printer to omit this, only doc comments on descriptor elements will be
+	// included in the printed output.
+	CommentsNonDoc = CommentsDetached | CommentsTrailing | CommentsTokens
+	// CommentsAll indicates all kinds of comments. If you configure a printer
+	// to omit this, no comments will appear in the printed output, even if the
+	// input descriptors had source info and comments. (The value -1 has every
+	// bit set, so it also covers all of the comment kinds above.)
+	CommentsAll = -1
+)
+
+// PrintProtoFiles prints all of the given file descriptors. The given open
+// function is given a file name and is responsible for creating the outputs and
+// returning the corresponding writer.
+func (p *Printer) PrintProtoFiles(fds []*desc.FileDescriptor, open func(name string) (io.WriteCloser, error)) error {
+	for _, fd := range fds {
+		name := fd.GetName()
+		out, err := open(name)
+		if err != nil {
+			return fmt.Errorf("failed to open %s: %v", name, err)
+		}
+		// print inside a closure so the writer is closed before moving on to
+		// the next file (or before returning an error)
+		printErr := func() error {
+			defer out.Close()
+			return p.PrintProtoFile(fd, out)
+		}()
+		if printErr != nil {
+			return fmt.Errorf("failed to write %s: %v", name, printErr)
+		}
+	}
+	return nil
+}
+
+// PrintProtosToFileSystem prints all of the given file descriptors to files in
+// the given directory. If file names in the given descriptors include path
+// information, they will be relative to the given root.
+func (p *Printer) PrintProtosToFileSystem(fds []*desc.FileDescriptor, rootDir string) error {
+	openFile := func(name string) (io.WriteCloser, error) {
+		path := filepath.Join(rootDir, name)
+		// make sure the destination directory exists before creating the file
+		if err := os.MkdirAll(filepath.Dir(path), os.ModePerm); err != nil {
+			return nil, err
+		}
+		return os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
+	}
+	return p.PrintProtoFiles(fds, openFile)
+}
+
+// pkg represents a package name (rendered as a "package" statement)
+type pkg string
+
+// imp represents an imported file name (rendered as an "import" statement)
+type imp string
+
+// ident represents an identifier
+type ident string
+
+// option represents a resolved descriptor option
+type option struct {
+	name string      // option name, as it should appear in proto source
+	val  interface{} // resolved option value
+}
+
+// reservedRange represents a reserved range from a message or enum
+type reservedRange struct {
+	start, end int32
+}
+
+// PrintProtoFile prints the given single file descriptor to the given writer.
+// It is a thin wrapper around printProto.
+func (p *Printer) PrintProtoFile(fd *desc.FileDescriptor, out io.Writer) error {
+	return p.printProto(fd, out)
+}
+
+// PrintProtoToString prints the given descriptor and returns the resulting
+// string. This can be used to print proto files, but it can also be used to
+// get the proto "source form" for any kind of descriptor, which can be a more
+// user-friendly way to present descriptors that are intended for human
+// consumption.
+func (p *Printer) PrintProtoToString(dsc desc.Descriptor) (string, error) {
+	var buf bytes.Buffer
+	if err := p.printProto(dsc, &buf); err != nil {
+		return "", err
+	}
+	return buf.String(), nil
+}
+
+// printProto renders the given descriptor as proto source, writing the result
+// to out. It first normalizes the printer's settings — note that this mutates
+// p.Indent and p.OmitComments in place — then dispatches on the concrete
+// descriptor type. Any error from the underlying writer is returned.
+func (p *Printer) printProto(dsc desc.Descriptor, out io.Writer) error {
+	w := newWriter(out)
+
+	if p.Indent == "" {
+		// default indent to two spaces
+		p.Indent = "  "
+	} else {
+		// indent must be all spaces or tabs, so convert other chars to spaces
+		ind := make([]rune, 0, len(p.Indent))
+		for _, r := range p.Indent {
+			if r == '\t' {
+				ind = append(ind, r)
+			} else {
+				ind = append(ind, ' ')
+			}
+		}
+		p.Indent = string(ind)
+	}
+	if p.OmitDetachedComments {
+		// honor the deprecated flag by folding it into the bitmask
+		p.OmitComments |= CommentsDetached
+	}
+
+	// build an extension registry over the whole file (recursively including
+	// dependencies) so that custom options can be interpreted when printed
+	er := dynamic.ExtensionRegistry{}
+	er.AddExtensionsFromFileRecursively(dsc.GetFile())
+	mf := dynamic.NewMessageFactoryWithExtensionRegistry(&er)
+	fdp := dsc.GetFile().AsFileDescriptorProto()
+	sourceInfo := internal.CreateSourceInfoMap(fdp)
+	extendOptionLocations(sourceInfo)
+
+	path := findElement(dsc)
+	switch d := dsc.(type) {
+	case *desc.FileDescriptor:
+		p.printFile(d, mf, w, sourceInfo)
+	case *desc.MessageDescriptor:
+		p.printMessage(d, mf, w, sourceInfo, path, 0)
+	case *desc.FieldDescriptor:
+		var scope string
+		if md, ok := d.GetParent().(*desc.MessageDescriptor); ok {
+			scope = md.GetFullyQualifiedName()
+		} else {
+			scope = d.GetFile().GetPackage()
+		}
+		if d.IsExtension() {
+			// a lone extension field is wrapped in a synthetic "extend" block
+			fmt.Fprint(w, "extend ")
+			extNameSi := sourceInfo.Get(append(path, internal.Field_extendeeTag))
+			p.printElementString(extNameSi, w, 0, p.qualifyName(d.GetFile().GetPackage(), scope, d.GetOwner().GetFullyQualifiedName()))
+			fmt.Fprintln(w, "{")
+
+			p.printField(d, mf, w, sourceInfo, path, scope, 1)
+
+			fmt.Fprintln(w, "}")
+		} else {
+			p.printField(d, mf, w, sourceInfo, path, scope, 0)
+		}
+	case *desc.OneOfDescriptor:
+		// a one-of is printed along with its member fields, so collect the
+		// owner's fields as candidate elements
+		md := d.GetOwner()
+		elements := elementAddrs{dsc: md}
+		for i := range md.GetFields() {
+			elements.addrs = append(elements.addrs, elementAddr{elementType: internal.Message_fieldsTag, elementIndex: i})
+		}
+		p.printOneOf(d, elements, 0, mf, w, sourceInfo, path[:len(path)-1], 0, path[len(path)-1])
+	case *desc.EnumDescriptor:
+		p.printEnum(d, mf, w, sourceInfo, path, 0)
+	case *desc.EnumValueDescriptor:
+		p.printEnumValue(d, mf, w, sourceInfo, path, 0)
+	case *desc.ServiceDescriptor:
+		p.printService(d, mf, w, sourceInfo, path, 0)
+	case *desc.MethodDescriptor:
+		p.printMethod(d, mf, w, sourceInfo, path, 0)
+	}
+
+	return w.err
+}
+
+// findElement computes the source-code-info path for the given descriptor:
+// the sequence of (field tag, index) pairs leading from the file root down to
+// the element, as used by SourceCodeInfo locations. A file descriptor (which
+// has no parent) yields a nil path.
+func findElement(dsc desc.Descriptor) []int32 {
+	if dsc.GetParent() == nil {
+		return nil
+	}
+	// recursively compute the parent's path, then append this element's
+	// tag and index within its parent
+	path := findElement(dsc.GetParent())
+	switch d := dsc.(type) {
+	case *desc.MessageDescriptor:
+		if pm, ok := d.GetParent().(*desc.MessageDescriptor); ok {
+			return append(path, internal.Message_nestedMessagesTag, getMessageIndex(d, pm.GetNestedMessageTypes()))
+		}
+		return append(path, internal.File_messagesTag, getMessageIndex(d, d.GetFile().GetMessageTypes()))
+
+	case *desc.FieldDescriptor:
+		if d.IsExtension() {
+			if pm, ok := d.GetParent().(*desc.MessageDescriptor); ok {
+				return append(path, internal.Message_extensionsTag, getFieldIndex(d, pm.GetNestedExtensions()))
+			}
+			return append(path, internal.File_extensionsTag, getFieldIndex(d, d.GetFile().GetExtensions()))
+		}
+		return append(path, internal.Message_fieldsTag, getFieldIndex(d, d.GetOwner().GetFields()))
+
+	case *desc.OneOfDescriptor:
+		return append(path, internal.Message_oneOfsTag, getOneOfIndex(d, d.GetOwner().GetOneOfs()))
+
+	case *desc.EnumDescriptor:
+		if pm, ok := d.GetParent().(*desc.MessageDescriptor); ok {
+			return append(path, internal.Message_enumsTag, getEnumIndex(d, pm.GetNestedEnumTypes()))
+		}
+		return append(path, internal.File_enumsTag, getEnumIndex(d, d.GetFile().GetEnumTypes()))
+
+	case *desc.EnumValueDescriptor:
+		return append(path, internal.Enum_valuesTag, getEnumValueIndex(d, d.GetEnum().GetValues()))
+
+	case *desc.ServiceDescriptor:
+		return append(path, internal.File_servicesTag, getServiceIndex(d, d.GetFile().GetServices()))
+
+	case *desc.MethodDescriptor:
+		return append(path, internal.Service_methodsTag, getMethodIndex(d, d.GetService().GetMethods()))
+
+	default:
+		panic(fmt.Sprintf("unexpected descriptor type: %T", dsc))
+	}
+}
+
+// getMessageIndex returns the index of md within list, panicking if absent.
+func getMessageIndex(md *desc.MessageDescriptor, list []*desc.MessageDescriptor) int32 {
+	for i, candidate := range list {
+		if candidate == md {
+			return int32(i)
+		}
+	}
+	panic(fmt.Sprintf("unable to determine index of message %s", md.GetFullyQualifiedName()))
+}
+
+// getFieldIndex returns the index of fd within list, panicking if absent.
+func getFieldIndex(fd *desc.FieldDescriptor, list []*desc.FieldDescriptor) int32 {
+	for i, candidate := range list {
+		if candidate == fd {
+			return int32(i)
+		}
+	}
+	panic(fmt.Sprintf("unable to determine index of field %s", fd.GetFullyQualifiedName()))
+}
+
+// getOneOfIndex returns the index of ood within list, panicking if absent.
+func getOneOfIndex(ood *desc.OneOfDescriptor, list []*desc.OneOfDescriptor) int32 {
+	for i, candidate := range list {
+		if candidate == ood {
+			return int32(i)
+		}
+	}
+	panic(fmt.Sprintf("unable to determine index of oneof %s", ood.GetFullyQualifiedName()))
+}
+
+// getEnumIndex returns the index of ed within list, panicking if absent.
+func getEnumIndex(ed *desc.EnumDescriptor, list []*desc.EnumDescriptor) int32 {
+	for i, candidate := range list {
+		if candidate == ed {
+			return int32(i)
+		}
+	}
+	panic(fmt.Sprintf("unable to determine index of enum %s", ed.GetFullyQualifiedName()))
+}
+
+// getEnumValueIndex returns the index of evd within list, panicking if absent.
+func getEnumValueIndex(evd *desc.EnumValueDescriptor, list []*desc.EnumValueDescriptor) int32 {
+	for i, candidate := range list {
+		if candidate == evd {
+			return int32(i)
+		}
+	}
+	panic(fmt.Sprintf("unable to determine index of enum value %s", evd.GetFullyQualifiedName()))
+}
+
+// getServiceIndex returns the index of sd within list, panicking if absent.
+func getServiceIndex(sd *desc.ServiceDescriptor, list []*desc.ServiceDescriptor) int32 {
+	for i, candidate := range list {
+		if candidate == sd {
+			return int32(i)
+		}
+	}
+	panic(fmt.Sprintf("unable to determine index of service %s", sd.GetFullyQualifiedName()))
+}
+
+// getMethodIndex returns the index of mtd within list, panicking if absent.
+func getMethodIndex(mtd *desc.MethodDescriptor, list []*desc.MethodDescriptor) int32 {
+	for i, candidate := range list {
+		if candidate == mtd {
+			return int32(i)
+		}
+	}
+	panic(fmt.Sprintf("unable to determine index of method %s", mtd.GetFullyQualifiedName()))
+}
+
+// newLine emits a blank separator line, unless compact output was requested.
+func (p *Printer) newLine(w io.Writer) {
+	if p.Compact {
+		return
+	}
+	fmt.Fprintln(w)
+}
+
+// printFile renders the given file descriptor as proto source to w: the
+// syntax statement first, then the package, imports, options, and all
+// top-level elements (in canonical or source order, per p.SortElements).
+// Consecutive top-level extensions that share an extendee are grouped into a
+// single "extend" block.
+func (p *Printer) printFile(fd *desc.FileDescriptor, mf *dynamic.MessageFactory, w *writer, sourceInfo internal.SourceInfoMap) {
+	opts, err := p.extractOptions(fd, fd.GetOptions(), mf)
+	if err != nil {
+		// record the failure so the caller of printProto sees it (previously
+		// the error was silently dropped; this matches printMessageBody)
+		if w.err == nil {
+			w.err = err
+		}
+		return
+	}
+
+	fdp := fd.AsFileDescriptorProto()
+	path := make([]int32, 1)
+
+	// ensure the package element has a location (re-using the file's package
+	// location) so it can be ordered and commented like other elements
+	path[0] = internal.File_packageTag
+	sourceInfo.PutIfAbsent(append(path, 0), sourceInfo.Get(path))
+
+	path[0] = internal.File_syntaxTag
+	si := sourceInfo.Get(path)
+	p.printElement(false, si, w, 0, func(w *writer) {
+		syn := fdp.GetSyntax()
+		if syn == "" {
+			syn = "proto2"
+		}
+		fmt.Fprintf(w, "syntax = %q;", syn)
+	})
+	p.newLine(w)
+
+	// collect addresses of all top-level elements; negative "order" values
+	// pin package and imports ahead of everything else in canonical order
+	elements := elementAddrs{dsc: fd, opts: opts}
+	if fdp.Package != nil {
+		elements.addrs = append(elements.addrs, elementAddr{elementType: internal.File_packageTag, elementIndex: 0, order: -3})
+	}
+	for i := range fd.AsFileDescriptorProto().GetDependency() {
+		elements.addrs = append(elements.addrs, elementAddr{elementType: internal.File_dependencyTag, elementIndex: i, order: -2})
+	}
+	elements.addrs = append(elements.addrs, optionsAsElementAddrs(internal.File_optionsTag, -1, opts)...)
+	for i := range fd.GetMessageTypes() {
+		elements.addrs = append(elements.addrs, elementAddr{elementType: internal.File_messagesTag, elementIndex: i})
+	}
+	for i := range fd.GetEnumTypes() {
+		elements.addrs = append(elements.addrs, elementAddr{elementType: internal.File_enumsTag, elementIndex: i})
+	}
+	for i := range fd.GetServices() {
+		elements.addrs = append(elements.addrs, elementAddr{elementType: internal.File_servicesTag, elementIndex: i})
+	}
+	for i := range fd.GetExtensions() {
+		elements.addrs = append(elements.addrs, elementAddr{elementType: internal.File_extensionsTag, elementIndex: i})
+	}
+
+	p.sort(elements, sourceInfo, nil)
+
+	pkgName := fd.GetPackage()
+
+	// ext is the extension whose "extend" block is currently open (nil if none)
+	var ext *desc.FieldDescriptor
+	for i, el := range elements.addrs {
+		d := elements.at(el)
+		path = []int32{el.elementType, int32(el.elementIndex)}
+		if el.elementType == internal.File_extensionsTag {
+			fld := d.(*desc.FieldDescriptor)
+			if ext == nil || ext.GetOwner() != fld.GetOwner() {
+				// need to open a new extend block
+				if ext != nil {
+					// close preceding extend block
+					fmt.Fprintln(w, "}")
+				}
+				if i > 0 {
+					p.newLine(w)
+				}
+
+				ext = fld
+				fmt.Fprint(w, "extend ")
+				extNameSi := sourceInfo.Get(append(path, internal.Field_extendeeTag))
+				p.printElementString(extNameSi, w, 0, p.qualifyName(pkgName, pkgName, fld.GetOwner().GetFullyQualifiedName()))
+				fmt.Fprintln(w, "{")
+			} else {
+				p.newLine(w)
+			}
+			p.printField(fld, mf, w, sourceInfo, path, pkgName, 1)
+		} else {
+			if ext != nil {
+				// close preceding extend block
+				fmt.Fprintln(w, "}")
+				ext = nil
+			}
+
+			if i > 0 {
+				p.newLine(w)
+			}
+
+			switch d := d.(type) {
+			case pkg:
+				si := sourceInfo.Get(path)
+				p.printElement(false, si, w, 0, func(w *writer) {
+					fmt.Fprintf(w, "package %s;", d)
+				})
+			case imp:
+				si := sourceInfo.Get(path)
+				p.printElement(false, si, w, 0, func(w *writer) {
+					fmt.Fprintf(w, "import %q;", d)
+				})
+			case []option:
+				p.printOptionsLong(d, w, sourceInfo, path, 0)
+			case *desc.MessageDescriptor:
+				p.printMessage(d, mf, w, sourceInfo, path, 0)
+			case *desc.EnumDescriptor:
+				p.printEnum(d, mf, w, sourceInfo, path, 0)
+			case *desc.ServiceDescriptor:
+				p.printService(d, mf, w, sourceInfo, path, 0)
+			}
+		}
+	}
+
+	if ext != nil {
+		// close trailing extend block
+		fmt.Fprintln(w, "}")
+	}
+}
+
+// sort orders the given elements: canonically when SortElements is set,
+// otherwise per the location info in the file's SourceCodeInfo.
+func (p *Printer) sort(elements elementAddrs, sourceInfo internal.SourceInfoMap, path []int32) {
+	if !p.SortElements {
+		// use source order (per location information in SourceCodeInfo); or
+		// if that isn't present use declaration order, but grouped by type
+		sort.Stable(elementSrcOrder{
+			elementAddrs: elements,
+			sourceInfo:   sourceInfo,
+			prefix:       path,
+		})
+		return
+	}
+	// canonical sorted order
+	sort.Stable(elements)
+}
+
+// qualifyName renders fqn for use in the given scope (within the given
+// package). If ForceFullyQualifiedNames is set, the result is always the
+// dot-prefixed fully-qualified name; otherwise the shortest unambiguous
+// relative name is computed by trimming the longest shared namespace prefix.
+func (p *Printer) qualifyName(pkg, scope string, fqn string) string {
+	if p.ForceFullyQualifiedNames {
+		// forcing fully-qualified names; make sure to include preceding dot
+		if fqn[0] == '.' {
+			return fqn
+		}
+		return "." + fqn
+	}
+
+	// compute relative name (so no leading dot)
+	if fqn[0] == '.' {
+		fqn = fqn[1:]
+	}
+	if scope != "" && !strings.HasSuffix(scope, ".") {
+		scope += "."
+	}
+	// successively peel trailing namespace components off of scope until we
+	// find a prefix of fqn (or we pass the enclosing package boundary)
+	for scope != "" {
+		if strings.HasPrefix(fqn, scope) {
+			return strings.TrimPrefix(fqn, scope)
+		}
+		if scope == pkg+"." {
+			break
+		}
+		dot := strings.LastIndex(scope[:len(scope)-1], ".")
+		scope = scope[:dot+1]
+	}
+	return fqn
+}
+
+// typeString returns the proto source rendering of the given field's type,
+// qualifying message and enum references relative to the given scope. Map
+// fields render as "map<K, V>".
+func (p *Printer) typeString(fld *desc.FieldDescriptor, scope string) string {
+	if fld.IsMap() {
+		return fmt.Sprintf("map<%s, %s>", p.typeString(fld.GetMapKeyType(), scope), p.typeString(fld.GetMapValueType(), scope))
+	}
+	switch fld.GetType() {
+	case descriptor.FieldDescriptorProto_TYPE_ENUM:
+		return p.qualifyName(fld.GetFile().GetPackage(), scope, fld.GetEnumType().GetFullyQualifiedName())
+	case descriptor.FieldDescriptorProto_TYPE_MESSAGE:
+		return p.qualifyName(fld.GetFile().GetPackage(), scope, fld.GetMessageType().GetFullyQualifiedName())
+	case descriptor.FieldDescriptorProto_TYPE_GROUP:
+		return fld.GetMessageType().GetName()
+	}
+	// everything else is a scalar with a fixed keyword name
+	scalarNames := map[descriptor.FieldDescriptorProto_Type]string{
+		descriptor.FieldDescriptorProto_TYPE_INT32:    "int32",
+		descriptor.FieldDescriptorProto_TYPE_INT64:    "int64",
+		descriptor.FieldDescriptorProto_TYPE_UINT32:   "uint32",
+		descriptor.FieldDescriptorProto_TYPE_UINT64:   "uint64",
+		descriptor.FieldDescriptorProto_TYPE_SINT32:   "sint32",
+		descriptor.FieldDescriptorProto_TYPE_SINT64:   "sint64",
+		descriptor.FieldDescriptorProto_TYPE_FIXED32:  "fixed32",
+		descriptor.FieldDescriptorProto_TYPE_FIXED64:  "fixed64",
+		descriptor.FieldDescriptorProto_TYPE_SFIXED32: "sfixed32",
+		descriptor.FieldDescriptorProto_TYPE_SFIXED64: "sfixed64",
+		descriptor.FieldDescriptorProto_TYPE_FLOAT:    "float",
+		descriptor.FieldDescriptorProto_TYPE_DOUBLE:   "double",
+		descriptor.FieldDescriptorProto_TYPE_BOOL:     "bool",
+		descriptor.FieldDescriptorProto_TYPE_STRING:   "string",
+		descriptor.FieldDescriptorProto_TYPE_BYTES:    "bytes",
+	}
+	if name, ok := scalarNames[fld.GetType()]; ok {
+		return name
+	}
+	panic(fmt.Sprintf("invalid type: %v", fld.GetType()))
+}
+
+// printMessage renders a full message declaration ("message Name { ... }")
+// at the given indent level, delegating the contents to printMessageBody.
+func (p *Printer) printMessage(md *desc.MessageDescriptor, mf *dynamic.MessageFactory, w *writer, sourceInfo internal.SourceInfoMap, path []int32, indent int) {
+	si := sourceInfo.Get(path)
+	p.printElement(true, si, w, indent, func(w *writer) {
+		p.indent(w, indent)
+
+		fmt.Fprint(w, "message ")
+		nameSi := sourceInfo.Get(append(path, internal.Message_nameTag))
+		p.printElementString(nameSi, w, indent, md.GetName())
+		fmt.Fprintln(w, "{")
+
+		p.printMessageBody(md, mf, w, sourceInfo, path, indent+1)
+		p.indent(w, indent)
+		fmt.Fprintln(w, "}")
+	})
+}
+
+// printMessageBody renders the contents of a message — options, fields,
+// one-ofs, nested types, extensions, extension/reserved ranges, and reserved
+// names — at the given indent level. The path is the source-info path to the
+// message itself. Any error extracting options is recorded in w.err.
+func (p *Printer) printMessageBody(md *desc.MessageDescriptor, mf *dynamic.MessageFactory, w *writer, sourceInfo internal.SourceInfoMap, path []int32, indent int) {
+	opts, err := p.extractOptions(md, md.GetOptions(), mf)
+	if err != nil {
+		if w.err == nil {
+			w.err = err
+		}
+		return
+	}
+
+	// skip tracks elements that get rendered as part of another element
+	// (map-entry/group messages, one-of member fields, collapsed ranges/names)
+	skip := map[interface{}]bool{}
+
+	elements := elementAddrs{dsc: md, opts: opts}
+	elements.addrs = append(elements.addrs, optionsAsElementAddrs(internal.Message_optionsTag, -1, opts)...)
+	for i := range md.AsDescriptorProto().GetReservedRange() {
+		elements.addrs = append(elements.addrs, elementAddr{elementType: internal.Message_reservedRangeTag, elementIndex: i})
+	}
+	for i := range md.AsDescriptorProto().GetReservedName() {
+		elements.addrs = append(elements.addrs, elementAddr{elementType: internal.Message_reservedNameTag, elementIndex: i})
+	}
+	for i := range md.AsDescriptorProto().GetExtensionRange() {
+		elements.addrs = append(elements.addrs, elementAddr{elementType: internal.Message_extensionRangeTag, elementIndex: i})
+	}
+	for i, fld := range md.GetFields() {
+		if fld.IsMap() || fld.GetType() == descriptor.FieldDescriptorProto_TYPE_GROUP {
+			// we don't emit nested messages for map types or groups since
+			// they get special treatment
+			skip[fld.GetMessageType()] = true
+		}
+		elements.addrs = append(elements.addrs, elementAddr{elementType: internal.Message_fieldsTag, elementIndex: i})
+	}
+	for i := range md.GetNestedMessageTypes() {
+		elements.addrs = append(elements.addrs, elementAddr{elementType: internal.Message_nestedMessagesTag, elementIndex: i})
+	}
+	for i := range md.GetNestedEnumTypes() {
+		elements.addrs = append(elements.addrs, elementAddr{elementType: internal.Message_enumsTag, elementIndex: i})
+	}
+	for i := range md.GetNestedExtensions() {
+		elements.addrs = append(elements.addrs, elementAddr{elementType: internal.Message_extensionsTag, elementIndex: i})
+	}
+
+	p.sort(elements, sourceInfo, path)
+
+	pkg := md.GetFile().GetPackage()
+	scope := md.GetFullyQualifiedName()
+
+	// ext is the extension whose nested "extend" block is currently open
+	var ext *desc.FieldDescriptor
+	for i, el := range elements.addrs {
+		d := elements.at(el)
+		// skip[d] will panic if d is a slice (which it could be for []option),
+		// so just ignore it since we don't try to skip options
+		if reflect.TypeOf(d).Kind() != reflect.Slice && skip[d] {
+			// skip this element
+			continue
+		}
+
+		childPath := append(path, el.elementType, int32(el.elementIndex))
+		if el.elementType == internal.Message_extensionsTag {
+			// extension
+			fld := d.(*desc.FieldDescriptor)
+			if ext == nil || ext.GetOwner() != fld.GetOwner() {
+				// need to open a new extend block
+				if ext != nil {
+					// close preceding extend block
+					p.indent(w, indent)
+					fmt.Fprintln(w, "}")
+				}
+				if i > 0 {
+					p.newLine(w)
+				}
+
+				ext = fld
+				p.indent(w, indent)
+				fmt.Fprint(w, "extend ")
+				extNameSi := sourceInfo.Get(append(childPath, internal.Field_extendeeTag))
+				p.printElementString(extNameSi, w, indent, p.qualifyName(pkg, scope, fld.GetOwner().GetFullyQualifiedName()))
+				fmt.Fprintln(w, "{")
+			} else {
+				p.newLine(w)
+			}
+			p.printField(fld, mf, w, sourceInfo, childPath, scope, indent+1)
+		} else {
+			if ext != nil {
+				// close preceding extend block
+				p.indent(w, indent)
+				fmt.Fprintln(w, "}")
+				ext = nil
+			}
+
+			if i > 0 {
+				p.newLine(w)
+			}
+
+			switch d := d.(type) {
+			case []option:
+				p.printOptionsLong(d, w, sourceInfo, childPath, indent)
+			case *desc.FieldDescriptor:
+				ood := d.GetOneOf()
+				if ood == nil {
+					p.printField(d, mf, w, sourceInfo, childPath, scope, indent)
+				} else if !skip[ood] {
+					// print the one-of, including all of its fields
+					p.printOneOf(ood, elements, i, mf, w, sourceInfo, path, indent, d.AsFieldDescriptorProto().GetOneofIndex())
+					skip[ood] = true
+				}
+			case *desc.MessageDescriptor:
+				p.printMessage(d, mf, w, sourceInfo, childPath, indent)
+			case *desc.EnumDescriptor:
+				p.printEnum(d, mf, w, sourceInfo, childPath, indent)
+			case *descriptor.DescriptorProto_ExtensionRange:
+				// collapse ranges into a single "extensions" block
+				ranges := []*descriptor.DescriptorProto_ExtensionRange{d}
+				addrs := []elementAddr{el}
+				for idx := i + 1; idx < len(elements.addrs); idx++ {
+					elnext := elements.addrs[idx]
+					if elnext.elementType != el.elementType {
+						break
+					}
+					extr := elements.at(elnext).(*descriptor.DescriptorProto_ExtensionRange)
+					// only ranges with identical options can share a block
+					if !areEqual(d.Options, extr.Options, mf) {
+						break
+					}
+					ranges = append(ranges, extr)
+					addrs = append(addrs, elnext)
+					skip[extr] = true
+				}
+				p.printExtensionRanges(md, ranges, addrs, mf, w, sourceInfo, path, indent)
+			case reservedRange:
+				// collapse reserved ranges into a single "reserved" block
+				ranges := []reservedRange{d}
+				addrs := []elementAddr{el}
+				for idx := i + 1; idx < len(elements.addrs); idx++ {
+					elnext := elements.addrs[idx]
+					if elnext.elementType != el.elementType {
+						break
+					}
+					rr := elements.at(elnext).(reservedRange)
+					ranges = append(ranges, rr)
+					addrs = append(addrs, elnext)
+					skip[rr] = true
+				}
+				p.printReservedRanges(ranges, false, addrs, w, sourceInfo, path, indent)
+			case string: // reserved name
+				// collapse reserved names into a single "reserved" block
+				names := []string{d}
+				addrs := []elementAddr{el}
+				for idx := i + 1; idx < len(elements.addrs); idx++ {
+					elnext := elements.addrs[idx]
+					if elnext.elementType != el.elementType {
+						break
+					}
+					rn := elements.at(elnext).(string)
+					names = append(names, rn)
+					addrs = append(addrs, elnext)
+					skip[rn] = true
+				}
+				p.printReservedNames(names, addrs, w, sourceInfo, path, indent)
+			}
+		}
+	}
+
+	if ext != nil {
+		// close trailing extend block
+		p.indent(w, indent)
+		fmt.Fprintln(w, "}")
+	}
+}
+
+// areEqual reports whether the two option messages are equal.
+//
+// proto.Equal doesn't handle unknown extensions very well :(
+// so we convert to a dynamic message (which should know about all extensions via
+// extension registry) and then compare
+func areEqual(a, b proto.Message, mf *dynamic.MessageFactory) bool {
+	return dynamic.MessagesEqual(asDynamicIfPossible(a, mf), asDynamicIfPossible(b, mf))
+}
+
+// asDynamicIfPossible returns the given message as a *dynamic.Message so that
+// comparisons see all extensions known to the given factory's registry. If
+// msg is already dynamic it is returned as is; if conversion is not possible
+// (no descriptor could be loaded, or ConvertFrom failed), the original
+// message is returned unchanged (best effort).
+func asDynamicIfPossible(msg proto.Message, mf *dynamic.MessageFactory) proto.Message {
+	if dm, ok := msg.(*dynamic.Message); ok {
+		return dm
+	}
+	// idiomatic early return above instead of else-after-return
+	if md, err := desc.LoadMessageDescriptorForMessage(msg); err == nil {
+		dm := mf.NewDynamicMessage(md)
+		if dm.ConvertFrom(msg) == nil {
+			return dm
+		}
+	}
+	return msg
+}
+
+// printField renders a single field (or extension) declaration: label, type,
+// name, number, and any bracketed options. For proto2 groups, the group's
+// message body is printed inline; a group's comments and position live on the
+// nested message type rather than the field, so source info is looked up via
+// the message's path instead.
+func (p *Printer) printField(fld *desc.FieldDescriptor, mf *dynamic.MessageFactory, w *writer, sourceInfo internal.SourceInfoMap, path []int32, scope string, indent int) {
+	var groupPath []int32
+	var si *descriptor.SourceCodeInfo_Location
+	if isGroup(fld) {
+		// compute path to group message type
+		groupPath = make([]int32, len(path)-2)
+		copy(groupPath, path)
+		var groupMsgIndex int32
+		md := fld.GetParent().(*desc.MessageDescriptor)
+		for i, nmd := range md.GetNestedMessageTypes() {
+			if nmd == fld.GetMessageType() {
+				// found it
+				groupMsgIndex = int32(i)
+				break
+			}
+		}
+		groupPath = append(groupPath, internal.Message_nestedMessagesTag, groupMsgIndex)
+
+		// the group message is where the field's comments and position are stored
+		si = sourceInfo.Get(groupPath)
+	} else {
+		si = sourceInfo.Get(path)
+	}
+
+	p.printElement(true, si, w, indent, func(w *writer) {
+		p.indent(w, indent)
+		if shouldEmitLabel(fld) {
+			locSi := sourceInfo.Get(append(path, internal.Field_labelTag))
+			p.printElementString(locSi, w, indent, labelString(fld.GetLabel()))
+		}
+
+		if isGroup(fld) {
+			fmt.Fprint(w, "group ")
+
+			typeSi := sourceInfo.Get(append(path, internal.Field_typeTag))
+			p.printElementString(typeSi, w, indent, p.typeString(fld, scope))
+			fmt.Fprint(w, "= ")
+
+			numSi := sourceInfo.Get(append(path, internal.Field_numberTag))
+			p.printElementString(numSi, w, indent, fmt.Sprintf("%d", fld.GetNumber()))
+
+			fmt.Fprintln(w, "{")
+			p.printMessageBody(fld.GetMessageType(), mf, w, sourceInfo, groupPath, indent+1)
+
+			p.indent(w, indent)
+			fmt.Fprintln(w, "}")
+		} else {
+			typeSi := sourceInfo.Get(append(path, internal.Field_typeTag))
+			p.printElementString(typeSi, w, indent, p.typeString(fld, scope))
+
+			nameSi := sourceInfo.Get(append(path, internal.Field_nameTag))
+			p.printElementString(nameSi, w, indent, fld.GetName())
+			fmt.Fprint(w, "= ")
+
+			numSi := sourceInfo.Get(append(path, internal.Field_numberTag))
+			p.printElementString(numSi, w, indent, fmt.Sprintf("%d", fld.GetNumber()))
+
+			opts, err := p.extractOptions(fld, fld.GetOptions(), mf)
+			if err != nil {
+				if w.err == nil {
+					w.err = err
+				}
+				return
+			}
+
+			// we use negative values for "extras" keys so they can't collide
+			// with legit option tags
+
+			// proto2 default values are rendered as a pseudo-option
+			if !fld.GetFile().IsProto3() && fld.AsFieldDescriptorProto().DefaultValue != nil {
+				defVal := fld.GetDefaultValue()
+				if fld.GetEnumType() != nil {
+					defVal = fld.GetEnumType().FindValueByNumber(defVal.(int32))
+				}
+				opts[-internal.Field_defaultTag] = []option{{name: "default", val: defVal}}
+			}
+
+			// only emit json_name when it differs from the default derivation
+			jsn := fld.AsFieldDescriptorProto().GetJsonName()
+			if jsn != "" && jsn != internal.JsonName(fld.GetName()) {
+				opts[-internal.Field_jsonNameTag] = []option{{name: "json_name", val: jsn}}
+			}
+
+			elements := elementAddrs{dsc: fld, opts: opts}
+			elements.addrs = optionsAsElementAddrs(internal.Field_optionsTag, 0, opts)
+			p.sort(elements, sourceInfo, path)
+			p.printOptionElementsShort(elements, w, sourceInfo, path, indent)
+
+			fmt.Fprint(w, ";")
+		}
+	})
+}
+
+func shouldEmitLabel(fld *desc.FieldDescriptor) bool {
+	return !fld.IsMap() && fld.GetOneOf() == nil && (fld.GetLabel() != descriptor.FieldDescriptorProto_LABEL_OPTIONAL || !fld.GetFile().IsProto3())
+}
+
+func labelString(lbl descriptor.FieldDescriptorProto_Label) string {
+	switch lbl {
+	case descriptor.FieldDescriptorProto_LABEL_OPTIONAL:
+		return "optional"
+	case descriptor.FieldDescriptorProto_LABEL_REQUIRED:
+		return "required"
+	case descriptor.FieldDescriptorProto_LABEL_REPEATED:
+		return "repeated"
+	}
+	panic(fmt.Sprintf("invalid label: %v", lbl))
+}
+
// isGroup reports whether fld is a proto2 group field (TYPE_GROUP), which is
// printed with "group" syntax instead of as an ordinary typed field.
func isGroup(fld *desc.FieldDescriptor) bool {
	return fld.GetType() == descriptor.FieldDescriptorProto_TYPE_GROUP
}
+
// printOneOf renders a oneof declaration, including its options and member
// fields. The member fields live in the PARENT message's element list, so
// parentElements and startFieldIndex are used to locate them; each member is
// recorded with a NEGATED element type to mark it as a sibling of the oneof
// (rather than a child), which elementAddrs.at and elementSrcOrder.Less
// understand. ooIndex is this oneof's index within the parent message.
func (p *Printer) printOneOf(ood *desc.OneOfDescriptor, parentElements elementAddrs, startFieldIndex int, mf *dynamic.MessageFactory, w *writer, sourceInfo internal.SourceInfoMap, parentPath []int32, indent int, ooIndex int32) {
	oopath := append(parentPath, internal.Message_oneOfsTag, ooIndex)
	oosi := sourceInfo.Get(oopath)
	p.printElement(true, oosi, w, indent, func(w *writer) {
		p.indent(w, indent)
		fmt.Fprint(w, "oneof ")
		extNameSi := sourceInfo.Get(append(oopath, internal.OneOf_nameTag))
		p.printElementString(extNameSi, w, indent, ood.GetName())
		fmt.Fprintln(w, "{")

		indent++
		opts, err := p.extractOptions(ood, ood.GetOptions(), mf)
		if err != nil {
			// record the first error on the writer and bail
			if w.err == nil {
				w.err = err
			}
			return
		}

		elements := elementAddrs{dsc: ood, opts: opts}
		elements.addrs = append(elements.addrs, optionsAsElementAddrs(internal.OneOf_optionsTag, -1, opts)...)

		// scan the parent's elements for this oneof's fields; count lets us
		// stop as soon as every choice has been found
		count := len(ood.GetChoices())
		for idx := startFieldIndex; count > 0 && idx < len(parentElements.addrs); idx++ {
			el := parentElements.addrs[idx]
			if el.elementType != internal.Message_fieldsTag {
				continue
			}
			if parentElements.at(el).(*desc.FieldDescriptor).GetOneOf() == ood {
				// negative tag indicates that this element is actually a sibling, not a child
				elements.addrs = append(elements.addrs, elementAddr{elementType: -internal.Message_fieldsTag, elementIndex: el.elementIndex})
				count--
			}
		}

		p.sort(elements, sourceInfo, oopath)

		scope := ood.GetOwner().GetFullyQualifiedName()

		for i, el := range elements.addrs {
			if i > 0 {
				p.newLine(w)
			}

			switch d := elements.at(el).(type) {
			case []option:
				// options are children of the oneof itself
				childPath := append(oopath, el.elementType, int32(el.elementIndex))
				p.printOptionsLong(d, w, sourceInfo, childPath, indent)
			case *desc.FieldDescriptor:
				// fields are siblings: addressed relative to the PARENT path,
				// un-negating the element type recorded above
				childPath := append(parentPath, -el.elementType, int32(el.elementIndex))
				p.printField(d, mf, w, sourceInfo, childPath, scope, indent)
			}
		}

		p.indent(w, indent-1)
		fmt.Fprintln(w, "}")
	})
}
+
// printExtensionRanges renders a single "extensions N, A to B, C to max;"
// statement covering all of the given ranges. addrs parallels ranges (one
// element address per range) and is used to look up source info.
//
// NOTE(review): opts and elPath are overwritten on every iteration, so the
// options printed at the end are those of the LAST range in the group;
// presumably the caller only groups ranges that share identical options —
// confirm at the call site.
func (p *Printer) printExtensionRanges(parent *desc.MessageDescriptor, ranges []*descriptor.DescriptorProto_ExtensionRange, addrs []elementAddr, mf *dynamic.MessageFactory, w *writer, sourceInfo internal.SourceInfoMap, parentPath []int32, indent int) {
	p.indent(w, indent)
	fmt.Fprint(w, "extensions ")

	var opts *descriptor.ExtensionRangeOptions
	var elPath []int32
	first := true
	for i, extr := range ranges {
		if first {
			first = false
		} else {
			fmt.Fprint(w, ", ")
		}
		opts = extr.Options
		el := addrs[i]
		elPath = append(parentPath, el.elementType, int32(el.elementIndex))
		si := sourceInfo.Get(elPath)
		p.printElement(true, si, w, inline(indent), func(w *writer) {
			// ranges are stored with an exclusive end; print the inclusive form
			if extr.GetStart() == extr.GetEnd()-1 {
				fmt.Fprintf(w, "%d ", extr.GetStart())
			} else if extr.GetEnd()-1 == internal.MaxTag {
				fmt.Fprintf(w, "%d to max ", extr.GetStart())
			} else {
				fmt.Fprintf(w, "%d to %d ", extr.GetStart(), extr.GetEnd()-1)
			}
		})
	}
	// any options apply to the statement as a whole; extensionRange pairs the
	// owning message with a range so printOptionsShort can resolve a Descriptor
	dsc := extensionRange{owner: parent, extRange: ranges[0]}
	p.printOptionsShort(dsc, opts, mf, internal.ExtensionRange_optionsTag, w, sourceInfo, elPath, indent)

	fmt.Fprintln(w, ";")
}
+
+func (p *Printer) printReservedRanges(ranges []reservedRange, isEnum bool, addrs []elementAddr, w *writer, sourceInfo internal.SourceInfoMap, parentPath []int32, indent int) {
+	p.indent(w, indent)
+	fmt.Fprint(w, "reserved ")
+
+	first := true
+	for i, rr := range ranges {
+		if first {
+			first = false
+		} else {
+			fmt.Fprint(w, ", ")
+		}
+		el := addrs[i]
+		si := sourceInfo.Get(append(parentPath, el.elementType, int32(el.elementIndex)))
+		p.printElement(false, si, w, inline(indent), func(w *writer) {
+			if rr.start == rr.end {
+				fmt.Fprintf(w, "%d ", rr.start)
+			} else if (rr.end == internal.MaxTag && !isEnum) ||
+				(rr.end == math.MaxInt32 && isEnum) {
+				fmt.Fprintf(w, "%d to max ", rr.start)
+			} else {
+				fmt.Fprintf(w, "%d to %d ", rr.start, rr.end)
+			}
+		})
+	}
+
+	fmt.Fprintln(w, ";")
+}
+
+func (p *Printer) printReservedNames(names []string, addrs []elementAddr, w *writer, sourceInfo internal.SourceInfoMap, parentPath []int32, indent int) {
+	p.indent(w, indent)
+	fmt.Fprint(w, "reserved ")
+
+	first := true
+	for i, name := range names {
+		if first {
+			first = false
+		} else {
+			fmt.Fprint(w, ", ")
+		}
+		el := addrs[i]
+		si := sourceInfo.Get(append(parentPath, el.elementType, int32(el.elementIndex)))
+		p.printElementString(si, w, indent, quotedString(name))
+	}
+
+	fmt.Fprintln(w, ";")
+}
+
// printEnum renders an enum declaration: its options, values, and reserved
// ranges/names, ordered per source info when available. Adjacent reserved
// ranges (and adjacent reserved names) are collapsed into one "reserved"
// statement; the skip map prevents re-printing elements consumed by such a
// collapse.
func (p *Printer) printEnum(ed *desc.EnumDescriptor, mf *dynamic.MessageFactory, w *writer, sourceInfo internal.SourceInfoMap, path []int32, indent int) {
	si := sourceInfo.Get(path)
	p.printElement(true, si, w, indent, func(w *writer) {
		p.indent(w, indent)

		fmt.Fprint(w, "enum ")
		nameSi := sourceInfo.Get(append(path, internal.Enum_nameTag))
		p.printElementString(nameSi, w, indent, ed.GetName())
		fmt.Fprintln(w, "{")

		indent++
		opts, err := p.extractOptions(ed, ed.GetOptions(), mf)
		if err != nil {
			// record the first error on the writer and bail
			if w.err == nil {
				w.err = err
			}
			return
		}

		// elements already emitted as part of a collapsed "reserved" statement
		skip := map[interface{}]bool{}

		// gather addresses for options, values, reserved ranges, and
		// reserved names, then order them (by source info if present)
		elements := elementAddrs{dsc: ed, opts: opts}
		elements.addrs = append(elements.addrs, optionsAsElementAddrs(internal.Enum_optionsTag, -1, opts)...)
		for i := range ed.GetValues() {
			elements.addrs = append(elements.addrs, elementAddr{elementType: internal.Enum_valuesTag, elementIndex: i})
		}
		for i := range ed.AsEnumDescriptorProto().GetReservedRange() {
			elements.addrs = append(elements.addrs, elementAddr{elementType: internal.Enum_reservedRangeTag, elementIndex: i})
		}
		for i := range ed.AsEnumDescriptorProto().GetReservedName() {
			elements.addrs = append(elements.addrs, elementAddr{elementType: internal.Enum_reservedNameTag, elementIndex: i})
		}

		p.sort(elements, sourceInfo, path)

		for i, el := range elements.addrs {
			d := elements.at(el)

			// skip[d] will panic if d is a slice (which it could be for []option),
			// so just ignore it since we don't try to skip options
			if reflect.TypeOf(d).Kind() != reflect.Slice && skip[d] {
				// skip this element
				continue
			}

			if i > 0 {
				p.newLine(w)
			}

			childPath := append(path, el.elementType, int32(el.elementIndex))

			switch d := d.(type) {
			case []option:
				p.printOptionsLong(d, w, sourceInfo, childPath, indent)
			case *desc.EnumValueDescriptor:
				p.printEnumValue(d, mf, w, sourceInfo, childPath, indent)
			case reservedRange:
				// collapse reserved ranges into a single "reserved" block
				ranges := []reservedRange{d}
				addrs := []elementAddr{el}
				for idx := i + 1; idx < len(elements.addrs); idx++ {
					elnext := elements.addrs[idx]
					if elnext.elementType != el.elementType {
						break
					}
					rr := elements.at(elnext).(reservedRange)
					ranges = append(ranges, rr)
					addrs = append(addrs, elnext)
					skip[rr] = true
				}
				p.printReservedRanges(ranges, true, addrs, w, sourceInfo, path, indent)
			case string: // reserved name
				// collapse reserved names into a single "reserved" block
				names := []string{d}
				addrs := []elementAddr{el}
				for idx := i + 1; idx < len(elements.addrs); idx++ {
					elnext := elements.addrs[idx]
					if elnext.elementType != el.elementType {
						break
					}
					rn := elements.at(elnext).(string)
					names = append(names, rn)
					addrs = append(addrs, elnext)
					skip[rn] = true
				}
				p.printReservedNames(names, addrs, w, sourceInfo, path, indent)
			}
		}

		p.indent(w, indent-1)
		fmt.Fprintln(w, "}")
	})
}
+
+func (p *Printer) printEnumValue(evd *desc.EnumValueDescriptor, mf *dynamic.MessageFactory, w *writer, sourceInfo internal.SourceInfoMap, path []int32, indent int) {
+	si := sourceInfo.Get(path)
+	p.printElement(true, si, w, indent, func(w *writer) {
+		p.indent(w, indent)
+
+		nameSi := sourceInfo.Get(append(path, internal.EnumVal_nameTag))
+		p.printElementString(nameSi, w, indent, evd.GetName())
+		fmt.Fprint(w, "= ")
+
+		numSi := sourceInfo.Get(append(path, internal.EnumVal_numberTag))
+		p.printElementString(numSi, w, indent, fmt.Sprintf("%d", evd.GetNumber()))
+
+		p.printOptionsShort(evd, evd.GetOptions(), mf, internal.EnumVal_optionsTag, w, sourceInfo, path, indent)
+
+		fmt.Fprint(w, ";")
+	})
+}
+
+func (p *Printer) printService(sd *desc.ServiceDescriptor, mf *dynamic.MessageFactory, w *writer, sourceInfo internal.SourceInfoMap, path []int32, indent int) {
+	si := sourceInfo.Get(path)
+	p.printElement(true, si, w, indent, func(w *writer) {
+		p.indent(w, indent)
+
+		fmt.Fprint(w, "service ")
+		nameSi := sourceInfo.Get(append(path, internal.Service_nameTag))
+		p.printElementString(nameSi, w, indent, sd.GetName())
+		fmt.Fprintln(w, "{")
+
+		indent++
+
+		opts, err := p.extractOptions(sd, sd.GetOptions(), mf)
+		if err != nil {
+			if w.err == nil {
+				w.err = err
+			}
+			return
+		}
+
+		elements := elementAddrs{dsc: sd, opts: opts}
+		elements.addrs = append(elements.addrs, optionsAsElementAddrs(internal.Service_optionsTag, -1, opts)...)
+		for i := range sd.GetMethods() {
+			elements.addrs = append(elements.addrs, elementAddr{elementType: internal.Service_methodsTag, elementIndex: i})
+		}
+
+		p.sort(elements, sourceInfo, path)
+
+		for i, el := range elements.addrs {
+			if i > 0 {
+				p.newLine(w)
+			}
+
+			childPath := append(path, el.elementType, int32(el.elementIndex))
+
+			switch d := elements.at(el).(type) {
+			case []option:
+				p.printOptionsLong(d, w, sourceInfo, childPath, indent)
+			case *desc.MethodDescriptor:
+				p.printMethod(d, mf, w, sourceInfo, childPath, indent)
+			}
+		}
+
+		p.indent(w, indent-1)
+		fmt.Fprintln(w, "}")
+	})
+}
+
// printMethod renders a single "rpc" declaration, prefixing request/response
// types with "stream" for streaming methods. If the method has options, they
// are printed in a braced body; otherwise the declaration ends with ";".
func (p *Printer) printMethod(mtd *desc.MethodDescriptor, mf *dynamic.MessageFactory, w *writer, sourceInfo internal.SourceInfoMap, path []int32, indent int) {
	si := sourceInfo.Get(path)
	pkg := mtd.GetFile().GetPackage()
	p.printElement(true, si, w, indent, func(w *writer) {
		p.indent(w, indent)

		fmt.Fprint(w, "rpc ")
		nameSi := sourceInfo.Get(append(path, internal.Method_nameTag))
		p.printElementString(nameSi, w, indent, mtd.GetName())

		// request type, qualified relative to this file's package
		fmt.Fprint(w, "( ")
		inSi := sourceInfo.Get(append(path, internal.Method_inputTag))
		inName := p.qualifyName(pkg, pkg, mtd.GetInputType().GetFullyQualifiedName())
		if mtd.IsClientStreaming() {
			inName = "stream " + inName
		}
		p.printElementString(inSi, w, indent, inName)

		fmt.Fprint(w, ") returns ( ")

		// response type, qualified the same way
		outSi := sourceInfo.Get(append(path, internal.Method_outputTag))
		outName := p.qualifyName(pkg, pkg, mtd.GetOutputType().GetFullyQualifiedName())
		if mtd.IsServerStreaming() {
			outName = "stream " + outName
		}
		p.printElementString(outSi, w, indent, outName)
		fmt.Fprint(w, ") ")

		opts, err := p.extractOptions(mtd, mtd.GetOptions(), mf)
		if err != nil {
			// record the first error on the writer and bail
			if w.err == nil {
				w.err = err
			}
			return
		}

		if len(opts) > 0 {
			fmt.Fprintln(w, "{")
			indent++

			elements := elementAddrs{dsc: mtd, opts: opts}
			elements.addrs = optionsAsElementAddrs(internal.Method_optionsTag, 0, opts)
			p.sort(elements, sourceInfo, path)
			// extend path with the options tag so per-option source-info
			// lookups inside printOptionsLong resolve correctly
			path = append(path, internal.Method_optionsTag)

			for i, addr := range elements.addrs {
				if i > 0 {
					p.newLine(w)
				}
				o := elements.at(addr).([]option)
				p.printOptionsLong(o, w, sourceInfo, path, indent)
			}

			p.indent(w, indent-1)
			fmt.Fprintln(w, "}")
		} else {
			fmt.Fprint(w, ";")
		}
	})
}
+
+func (p *Printer) printOptionsLong(opts []option, w *writer, sourceInfo internal.SourceInfoMap, path []int32, indent int) {
+	p.printOptions(opts, w, indent,
+		func(i int32) *descriptor.SourceCodeInfo_Location {
+			return sourceInfo.Get(append(path, i))
+		},
+		func(w *writer, indent int, opt option) {
+			p.indent(w, indent)
+			fmt.Fprint(w, "option ")
+			p.printOption(opt.name, opt.val, w, indent)
+			fmt.Fprint(w, ";")
+		})
+}
+
+func (p *Printer) printOptionsShort(dsc interface{}, optsMsg proto.Message, mf *dynamic.MessageFactory, optsTag int32, w *writer, sourceInfo internal.SourceInfoMap, path []int32, indent int) {
+	d, ok := dsc.(desc.Descriptor)
+	if !ok {
+		d = dsc.(extensionRange).owner
+	}
+	opts, err := p.extractOptions(d, optsMsg, mf)
+	if err != nil {
+		if w.err == nil {
+			w.err = err
+		}
+		return
+	}
+
+	elements := elementAddrs{dsc: dsc, opts: opts}
+	elements.addrs = optionsAsElementAddrs(optsTag, 0, opts)
+	p.sort(elements, sourceInfo, path)
+	p.printOptionElementsShort(elements, w, sourceInfo, path, indent)
+}
+
// printOptionElementsShort prints the given option addresses in bracketed
// short form ("[a = 1, b = 2]"). Nothing is printed when there are no
// addresses. A negative elementIndex marks a pseudo-option (e.g. "default"
// or "json_name"), whose source-info path is the negated index appended
// directly to path, without the options tag.
func (p *Printer) printOptionElementsShort(addrs elementAddrs, w *writer, sourceInfo internal.SourceInfoMap, path []int32, indent int) {
	if len(addrs.addrs) == 0 {
		return
	}
	first := true
	fmt.Fprint(w, "[")
	for _, addr := range addrs.addrs {
		opts := addrs.at(addr).([]option)
		var childPath []int32
		if addr.elementIndex < 0 {
			// pseudo-option
			childPath = append(path, int32(-addr.elementIndex))
		} else {
			childPath = append(path, addr.elementType, int32(addr.elementIndex))
		}
		p.printOptions(opts, w, inline(indent),
			func(i int32) *descriptor.SourceCodeInfo_Location {
				// pseudo-options already point at their full path; real
				// options get the per-value index appended
				p := childPath
				if addr.elementIndex >= 0 {
					p = append(p, i)
				}
				return sourceInfo.Get(p)
			},
			func(w *writer, indent int, opt option) {
				// comma-separate successive options in the bracket list
				if first {
					first = false
				} else {
					fmt.Fprint(w, ", ")
				}
				p.printOption(opt.name, opt.val, w, indent)
				fmt.Fprint(w, " ") // trailing space
			})
	}
	fmt.Fprint(w, "]")
}
+
+func (p *Printer) printOptions(opts []option, w *writer, indent int, siFetch func(i int32) *descriptor.SourceCodeInfo_Location, fn func(w *writer, indent int, opt option)) {
+	for i, opt := range opts {
+		si := siFetch(int32(i))
+		p.printElement(false, si, w, indent, func(w *writer) {
+			fn(w, indent, opt)
+		})
+	}
+}
+
// inline converts an indent level to its "inline" form. A negative indent
// signals inline printing; non-negative values are negated with an extra
// offset of 2 so there is room in case the value wraps. Already-negative
// values are returned unchanged.
func inline(indent int) int {
	if indent >= 0 {
		// negative indent means inline; extra 2 stops further in case value wraps
		return -indent - 2
	}
	// already inlined
	return indent
}
+
+func sortKeys(m map[interface{}]interface{}) []interface{} {
+	res := make(sortedKeys, len(m))
+	i := 0
+	for k := range m {
+		res[i] = k
+		i++
+	}
+	sort.Sort(res)
+	return ([]interface{})(res)
+}
+
// sortedKeys implements sort.Interface for a slice of map keys whose dynamic
// types are homogeneous: one of int32, uint32, int64, uint64, string, or
// bool. Less panics for any other key type.
type sortedKeys []interface{}

func (k sortedKeys) Len() int { return len(k) }

func (k sortedKeys) Swap(i, j int) { k[i], k[j] = k[j], k[i] }

func (k sortedKeys) Less(i, j int) bool {
	switch ki := k[i].(type) {
	case string:
		return ki < k[j].(string)
	case int32:
		return ki < k[j].(int32)
	case int64:
		return ki < k[j].(int64)
	case uint32:
		return ki < k[j].(uint32)
	case uint64:
		return ki < k[j].(uint64)
	case bool:
		// false sorts before true
		return !ki && k[j].(bool)
	}
	panic(fmt.Sprintf("invalid type for map key: %T", k[i]))
}
+
// printOption prints a single `name = value` pair, formatting the value per
// its dynamic type: numbers verbatim, strings/bytes as quoted text-format
// literals, identifiers and enum value names bare, and message values as a
// compact text-format block. Panics on an unsupported value type.
func (p *Printer) printOption(name string, optVal interface{}, w *writer, indent int) {
	fmt.Fprintf(w, "%s = ", name)

	switch optVal := optVal.(type) {
	case int32, uint32, int64, uint64:
		fmt.Fprintf(w, "%d", optVal)
	case float32, float64:
		fmt.Fprintf(w, "%f", optVal)
	case string:
		fmt.Fprintf(w, "%s", quotedString(optVal))
	case []byte:
		fmt.Fprintf(w, "%s", quotedString(string(optVal)))
	case bool:
		fmt.Fprintf(w, "%v", optVal)
	case ident:
		fmt.Fprintf(w, "%s", optVal)
	case *desc.EnumValueDescriptor:
		fmt.Fprintf(w, "%s", optVal.GetName())
	case proto.Message:
		// TODO: if value is too long, marshal to text format with indentation to
		// make output prettier (also requires correctly indenting subsequent lines)

		// TODO: alternate approach so we can apply p.ForceFullyQualifiedNames
		// inside the resulting value?

		fmt.Fprintf(w, "{ %s }", proto.CompactTextString(optVal))
	default:
		panic(fmt.Sprintf("unknown type of value %T for field %s", optVal, name))
	}
}
+
// edgeKind identifies the kind of descriptor that a source-info path element
// transitions into; it is the state in the small state machine (see edges,
// below) used by extendOptionLocations.
type edgeKind int

const (
	edgeKindOption edgeKind = iota
	edgeKindFile
	edgeKindMessage
	edgeKindField
	edgeKindOneOf
	edgeKindExtensionRange
	edgeKindEnum
	edgeKindEnumVal
	edgeKindService
	edgeKindMethod
)
+
// edges in simple state machine for matching options paths
// whose prefix should be included in source info to handle
// the way options are printed (which cannot always include
// the full path from original source). Keys of each inner
// map are descriptor-proto field tags; the value is the
// state reached by following that tag. The start state is
// edgeKindFile.
var edges = map[edgeKind]map[int32]edgeKind{
	edgeKindFile: {
		internal.File_optionsTag:    edgeKindOption,
		internal.File_messagesTag:   edgeKindMessage,
		internal.File_enumsTag:      edgeKindEnum,
		internal.File_extensionsTag: edgeKindField,
		internal.File_servicesTag:   edgeKindService,
	},
	edgeKindMessage: {
		internal.Message_optionsTag:        edgeKindOption,
		internal.Message_fieldsTag:         edgeKindField,
		internal.Message_oneOfsTag:         edgeKindOneOf,
		internal.Message_nestedMessagesTag: edgeKindMessage,
		internal.Message_enumsTag:          edgeKindEnum,
		internal.Message_extensionsTag:     edgeKindField,
		internal.Message_extensionRangeTag: edgeKindExtensionRange,
		// TODO: reserved range tag
	},
	edgeKindField: {
		internal.Field_optionsTag: edgeKindOption,
	},
	edgeKindOneOf: {
		internal.OneOf_optionsTag: edgeKindOption,
	},
	edgeKindExtensionRange: {
		internal.ExtensionRange_optionsTag: edgeKindOption,
	},
	edgeKindEnum: {
		internal.Enum_optionsTag: edgeKindOption,
		internal.Enum_valuesTag:  edgeKindEnumVal,
	},
	edgeKindEnumVal: {
		internal.EnumVal_optionsTag: edgeKindOption,
	},
	edgeKindService: {
		internal.Service_optionsTag: edgeKindOption,
		internal.Service_methodsTag: edgeKindMethod,
	},
	edgeKindMethod: {
		internal.Method_optionsTag: edgeKindOption,
	},
}
+
// extendOptionLocations walks every source-info location and, for each path
// that leads into an options field (per the edges state machine above), adds
// truncated alias entries so that the shorter paths used when printing
// options can still find source info.
func extendOptionLocations(sc internal.SourceInfoMap) {
	for _, loc := range sc {
		allowed := edges[edgeKindFile]
		// walk the path two elements at a time (tag, index) through the
		// state machine until it either dead-ends or reaches an option
		for i := 0; i+1 < len(loc.Path); i += 2 {
			nextKind, ok := allowed[loc.Path[i]]
			if !ok {
				break
			}
			if nextKind == edgeKindOption {
				// We've found an option entry. This could be arbitrarily
				// deep (for options that are nested messages) or it could
				// end abruptly (for non-repeated fields). But we need a path
				// that is exactly the path-so-far plus two: the option tag
				// and an optional index for repeated option fields (zero
				// for non-repeated option fields). This is used for
				// querying source info when printing options.
				newPath := make([]int32, i+3)
				copy(newPath, loc.Path)
				sc.PutIfAbsent(newPath, loc)
				// we also add a path-so-far-plus-two entry with an explicit
				// zero index -- just in case this actual path has an extra
				// path element that is not an index (e.g. the option field
				// is not repeated, but the source info we are looking at
				// indicates a tag of a nested field)
				newPath[len(newPath)-1] = 0
				sc.PutIfAbsent(newPath, loc)
				// finally, we need the path-so-far plus one, just the option
				// tag, for sorting option groups
				newPath = newPath[:len(newPath)-1]
				sc.PutIfAbsent(newPath, loc)

				break
			} else {
				allowed = edges[nextKind]
			}
		}
	}
}
+
+func (p *Printer) extractOptions(dsc desc.Descriptor, opts proto.Message, mf *dynamic.MessageFactory) (map[int32][]option, error) {
+	md, err := desc.LoadMessageDescriptorForMessage(opts)
+	if err != nil {
+		return nil, err
+	}
+	dm := mf.NewDynamicMessage(md)
+	if err = dm.ConvertFrom(opts); err != nil {
+		return nil, fmt.Errorf("failed convert %s to dynamic message: %v", md.GetFullyQualifiedName(), err)
+	}
+
+	pkg := dsc.GetFile().GetPackage()
+	var scope string
+	if _, ok := dsc.(*desc.FileDescriptor); ok {
+		scope = pkg
+	} else {
+		scope = dsc.GetFullyQualifiedName()
+	}
+
+	options := map[int32][]option{}
+	var uninterpreted []interface{}
+	for _, fldset := range [][]*desc.FieldDescriptor{md.GetFields(), mf.GetExtensionRegistry().AllExtensionsForType(md.GetFullyQualifiedName())} {
+		for _, fld := range fldset {
+			if dm.HasField(fld) {
+				val := dm.GetField(fld)
+				var opts []option
+				var name string
+				if fld.IsExtension() {
+					name = fmt.Sprintf("(%s)", p.qualifyName(pkg, scope, fld.GetFullyQualifiedName()))
+				} else {
+					name = fld.GetName()
+				}
+				switch val := val.(type) {
+				case []interface{}:
+					if fld.GetNumber() == internal.UninterpretedOptionsTag {
+						// we handle uninterpreted options differently
+						uninterpreted = val
+						continue
+					}
+
+					for _, e := range val {
+						if fld.GetType() == descriptor.FieldDescriptorProto_TYPE_ENUM {
+							ev := fld.GetEnumType().FindValueByNumber(e.(int32))
+							if ev == nil {
+								// have to skip unknown enum values :(
+								continue
+							}
+							e = ev
+						}
+						var name string
+						if fld.IsExtension() {
+							name = fmt.Sprintf("(%s)", p.qualifyName(pkg, scope, fld.GetFullyQualifiedName()))
+						} else {
+							name = fld.GetName()
+						}
+						opts = append(opts, option{name: name, val: e})
+					}
+				case map[interface{}]interface{}:
+					for k := range sortKeys(val) {
+						v := val[k]
+						vf := fld.GetMapValueType()
+						if vf.GetType() == descriptor.FieldDescriptorProto_TYPE_ENUM {
+							ev := vf.GetEnumType().FindValueByNumber(v.(int32))
+							if ev == nil {
+								// have to skip unknown enum values :(
+								continue
+							}
+							v = ev
+						}
+						entry := mf.NewDynamicMessage(fld.GetMessageType())
+						entry.SetFieldByNumber(1, k)
+						entry.SetFieldByNumber(2, v)
+						opts = append(opts, option{name: name, val: entry})
+					}
+				default:
+					if fld.GetType() == descriptor.FieldDescriptorProto_TYPE_ENUM {
+						ev := fld.GetEnumType().FindValueByNumber(val.(int32))
+						if ev == nil {
+							// have to skip unknown enum values :(
+							continue
+						}
+						val = ev
+					}
+					opts = append(opts, option{name: name, val: val})
+				}
+				if len(opts) > 0 {
+					options[fld.GetNumber()] = opts
+				}
+			}
+		}
+	}
+
+	// if there are uninterpreted options, add those too
+	if len(uninterpreted) > 0 {
+		opts := make([]option, len(uninterpreted))
+		for i, u := range uninterpreted {
+			var unint *descriptor.UninterpretedOption
+			if un, ok := u.(*descriptor.UninterpretedOption); ok {
+				unint = un
+			} else {
+				dm := u.(*dynamic.Message)
+				unint = &descriptor.UninterpretedOption{}
+				if err := dm.ConvertTo(unint); err != nil {
+					return nil, err
+				}
+			}
+
+			var buf bytes.Buffer
+			for ni, n := range unint.Name {
+				if ni > 0 {
+					buf.WriteByte('.')
+				}
+				if n.GetIsExtension() {
+					fmt.Fprintf(&buf, "(%s)", n.GetNamePart())
+				} else {
+					buf.WriteString(n.GetNamePart())
+				}
+			}
+
+			var v interface{}
+			switch {
+			case unint.IdentifierValue != nil:
+				v = ident(unint.GetIdentifierValue())
+			case unint.StringValue != nil:
+				v = string(unint.GetStringValue())
+			case unint.DoubleValue != nil:
+				v = unint.GetDoubleValue()
+			case unint.PositiveIntValue != nil:
+				v = unint.GetPositiveIntValue()
+			case unint.NegativeIntValue != nil:
+				v = unint.GetNegativeIntValue()
+			case unint.AggregateValue != nil:
+				v = ident(unint.GetAggregateValue())
+			}
+
+			opts[i] = option{name: buf.String(), val: v}
+		}
+		options[internal.UninterpretedOptionsTag] = opts
+	}
+
+	return options, nil
+}
+
+func optionsAsElementAddrs(optionsTag int32, order int, opts map[int32][]option) []elementAddr {
+	var optAddrs []elementAddr
+	for tag := range opts {
+		optAddrs = append(optAddrs, elementAddr{elementType: optionsTag, elementIndex: int(tag), order: order})
+	}
+	sort.Sort(optionsByName{addrs: optAddrs, opts: opts})
+	return optAddrs
+}
+
// quotedString implements the text format for string literals for protocol
// buffers. This form is also acceptable for string literals in option values
// by the protocol buffer compiler, protoc.
func quotedString(s string) string {
	var b bytes.Buffer
	b.WriteByte('"')
	// Loop over the bytes, not the runes: bytes outside the printable ASCII
	// range are emitted as octal escapes, which round-trips arbitrary data.
	for i := 0; i < len(s); i++ {
		// Divergence from C++: we don't escape apostrophes.
		// There's no need to escape them, and the C++ parser
		// copes with a naked apostrophe.
		switch c := s[i]; c {
		case '\n':
			b.WriteString("\\n")
		case '\r':
			b.WriteString("\\r")
		case '\t':
			b.WriteString("\\t")
		case '"':
			// BUG FIX: previously this wrote only "\\" (a lone backslash),
			// dropping the quote character entirely and producing an invalid
			// literal. A double-quote must be escaped as \" inside the quotes.
			b.WriteString("\\\"")
		case '\\':
			b.WriteString("\\\\")
		default:
			if c >= 0x20 && c < 0x7f {
				b.WriteByte(c)
			} else {
				fmt.Fprintf(&b, "\\%03o", c)
			}
		}
	}
	b.WriteByte('"')

	return b.String()
}
+
// elementAddr identifies one element of a descriptor: elementType is the
// descriptor-proto tag number for the element's category, elementIndex is
// its index within that category, and order ranks whole groups before
// per-element comparison. Negative elementType or elementIndex values have
// special meanings in some contexts (oneof sibling fields, pseudo-options).
type elementAddr struct {
	elementType  int32
	elementIndex int
	order        int
}

// elementAddrs is a sortable collection of element addresses that all belong
// to one descriptor (dsc), along with that descriptor's extracted options
// (opts) for resolving option addresses.
type elementAddrs struct {
	addrs []elementAddr
	dsc   interface{}
	opts  map[int32][]option
}

func (a elementAddrs) Len() int {
	return len(a.addrs)
}

// Less orders elements by explicit order, then element type, then by a rule
// specific to the element's kind (tag number, enum number+name, range start,
// lexical name, or option name).
func (a elementAddrs) Less(i, j int) bool {
	// explicit order is considered first
	if a.addrs[i].order < a.addrs[j].order {
		return true
	} else if a.addrs[i].order > a.addrs[j].order {
		return false
	}
	// if order is equal, sort by element type
	if a.addrs[i].elementType < a.addrs[j].elementType {
		return true
	} else if a.addrs[i].elementType > a.addrs[j].elementType {
		return false
	}

	di := a.at(a.addrs[i])
	dj := a.at(a.addrs[j])

	switch vi := di.(type) {
	case *desc.FieldDescriptor:
		// fields are ordered by tag number
		vj := dj.(*desc.FieldDescriptor)
		// regular fields before extensions; extensions grouped by extendee
		if !vi.IsExtension() && vj.IsExtension() {
			return true
		} else if vi.IsExtension() && !vj.IsExtension() {
			return false
		} else if vi.IsExtension() && vj.IsExtension() {
			if vi.GetOwner() != vj.GetOwner() {
				return vi.GetOwner().GetFullyQualifiedName() < vj.GetOwner().GetFullyQualifiedName()
			}
		}
		return vi.GetNumber() < vj.GetNumber()

	case *desc.EnumValueDescriptor:
		// enum values ordered by number then name
		vj := dj.(*desc.EnumValueDescriptor)
		if vi.GetNumber() == vj.GetNumber() {
			return vi.GetName() < vj.GetName()
		}
		return vi.GetNumber() < vj.GetNumber()

	case *descriptor.DescriptorProto_ExtensionRange:
		// extension ranges ordered by tag
		return vi.GetStart() < dj.(*descriptor.DescriptorProto_ExtensionRange).GetStart()

	case reservedRange:
		// reserved ranges ordered by tag, too
		return vi.start < dj.(reservedRange).start

	case string:
		// reserved names lexically sorted
		return vi < dj.(string)

	case pkg:
		// package names lexically sorted
		return vi < dj.(pkg)

	case imp:
		// import paths lexically sorted
		return vi < dj.(imp)

	case []option:
		// options sorted by name, extensions last
		return optionLess(vi, dj.([]option))

	default:
		// all other descriptors ordered by name
		return di.(desc.Descriptor).GetName() < dj.(desc.Descriptor).GetName()
	}
}

func (a elementAddrs) Swap(i, j int) {
	a.addrs[i], a.addrs[j] = a.addrs[j], a.addrs[i]
}
+
// at dereferences an element address against the collection's descriptor,
// returning the concrete element: a child descriptor, a []option from the
// extracted options map (keyed by the address's elementIndex, which holds
// the option field's tag), a reservedRange, a reserved-name string, or a
// pkg/imp wrapper. For oneofs, the negated Message_fieldsTag addresses a
// sibling field of the owning message (see printOneOf). Panics if the
// address does not match any known element of the descriptor.
func (a elementAddrs) at(addr elementAddr) interface{} {
	switch dsc := a.dsc.(type) {
	case *desc.FileDescriptor:
		switch addr.elementType {
		case internal.File_packageTag:
			return pkg(dsc.GetPackage())
		case internal.File_dependencyTag:
			return imp(dsc.AsFileDescriptorProto().GetDependency()[addr.elementIndex])
		case internal.File_optionsTag:
			return a.opts[int32(addr.elementIndex)]
		case internal.File_messagesTag:
			return dsc.GetMessageTypes()[addr.elementIndex]
		case internal.File_enumsTag:
			return dsc.GetEnumTypes()[addr.elementIndex]
		case internal.File_servicesTag:
			return dsc.GetServices()[addr.elementIndex]
		case internal.File_extensionsTag:
			return dsc.GetExtensions()[addr.elementIndex]
		}
	case *desc.MessageDescriptor:
		switch addr.elementType {
		case internal.Message_optionsTag:
			return a.opts[int32(addr.elementIndex)]
		case internal.Message_fieldsTag:
			return dsc.GetFields()[addr.elementIndex]
		case internal.Message_nestedMessagesTag:
			return dsc.GetNestedMessageTypes()[addr.elementIndex]
		case internal.Message_enumsTag:
			return dsc.GetNestedEnumTypes()[addr.elementIndex]
		case internal.Message_extensionsTag:
			return dsc.GetNestedExtensions()[addr.elementIndex]
		case internal.Message_extensionRangeTag:
			return dsc.AsDescriptorProto().GetExtensionRange()[addr.elementIndex]
		case internal.Message_reservedRangeTag:
			// message reserved ranges have an exclusive end; normalize to inclusive
			rng := dsc.AsDescriptorProto().GetReservedRange()[addr.elementIndex]
			return reservedRange{start: rng.GetStart(), end: rng.GetEnd() - 1}
		case internal.Message_reservedNameTag:
			return dsc.AsDescriptorProto().GetReservedName()[addr.elementIndex]
		}
	case *desc.FieldDescriptor:
		if addr.elementType == internal.Field_optionsTag {
			return a.opts[int32(addr.elementIndex)]
		}
	case *desc.OneOfDescriptor:
		switch addr.elementType {
		case internal.OneOf_optionsTag:
			return a.opts[int32(addr.elementIndex)]
		case -internal.Message_fieldsTag:
			// negative tag: a sibling field of the owning message (see printOneOf)
			return dsc.GetOwner().GetFields()[addr.elementIndex]
		}
	case *desc.EnumDescriptor:
		switch addr.elementType {
		case internal.Enum_optionsTag:
			return a.opts[int32(addr.elementIndex)]
		case internal.Enum_valuesTag:
			return dsc.GetValues()[addr.elementIndex]
		case internal.Enum_reservedRangeTag:
			// enum reserved ranges already have an inclusive end
			rng := dsc.AsEnumDescriptorProto().GetReservedRange()[addr.elementIndex]
			return reservedRange{start: rng.GetStart(), end: rng.GetEnd()}
		case internal.Enum_reservedNameTag:
			return dsc.AsEnumDescriptorProto().GetReservedName()[addr.elementIndex]
		}
	case *desc.EnumValueDescriptor:
		if addr.elementType == internal.EnumVal_optionsTag {
			return a.opts[int32(addr.elementIndex)]
		}
	case *desc.ServiceDescriptor:
		switch addr.elementType {
		case internal.Service_optionsTag:
			return a.opts[int32(addr.elementIndex)]
		case internal.Service_methodsTag:
			return dsc.GetMethods()[addr.elementIndex]
		}
	case *desc.MethodDescriptor:
		if addr.elementType == internal.Method_optionsTag {
			return a.opts[int32(addr.elementIndex)]
		}
	case extensionRange:
		if addr.elementType == internal.ExtensionRange_optionsTag {
			return a.opts[int32(addr.elementIndex)]
		}
	}

	panic(fmt.Sprintf("location for unknown field %d of %T", addr.elementType, a.dsc))
}
+
// extensionRange pairs an extension range with the message that declares it.
// The raw DescriptorProto_ExtensionRange is not a desc.Descriptor, so
// printOptionsShort unwraps owner to resolve the range's options.
type extensionRange struct {
	owner    *desc.MessageDescriptor
	extRange *descriptor.DescriptorProto_ExtensionRange
}
+
// elementSrcOrder sorts element addresses by their position in the original
// source file (via sourceInfo spans), falling back to elementAddrs' ordering
// rules only implicitly: elements with no source info keep their relative
// order under a stable sort. prefix is the source-info path of the elements'
// parent.
type elementSrcOrder struct {
	elementAddrs
	sourceInfo internal.SourceInfoMap
	prefix     []int32
}

func (a elementSrcOrder) Less(i, j int) bool {
	ti := a.addrs[i].elementType
	ei := a.addrs[i].elementIndex

	tj := a.addrs[j].elementType
	ej := a.addrs[j].elementIndex

	// look up each element's source location. A negative index is a
	// pseudo-option addressed by its negated tag directly under prefix;
	// a negative type is a oneof sibling field addressed relative to the
	// parent (prefix minus its last two elements).
	// NOTE(review): in the negative-type branch the still-negative ti/tj is
	// appended as-is, whereas printOneOf negates it when building paths —
	// verify this lookup can ever succeed for sibling fields.
	var si, sj *descriptor.SourceCodeInfo_Location
	if ei < 0 {
		si = a.sourceInfo.Get(append(a.prefix, -int32(ei)))
	} else if ti < 0 {
		p := make([]int32, len(a.prefix)-2)
		copy(p, a.prefix)
		si = a.sourceInfo.Get(append(p, ti, int32(ei)))
	} else {
		si = a.sourceInfo.Get(append(a.prefix, ti, int32(ei)))
	}
	if ej < 0 {
		sj = a.sourceInfo.Get(append(a.prefix, -int32(ej)))
	} else if tj < 0 {
		p := make([]int32, len(a.prefix)-2)
		copy(p, a.prefix)
		sj = a.sourceInfo.Get(append(p, tj, int32(ej)))
	} else {
		sj = a.sourceInfo.Get(append(a.prefix, tj, int32(ej)))
	}

	if (si == nil) != (sj == nil) {
		// generally, we put unknown elements after known ones;
		// except package and option elements go first

		// i will be unknown and j will be known
		swapped := false
		if si != nil {
			si, sj = sj, si
			// no need to swap ti and tj because we don't use tj anywhere below
			ti = tj
			swapped = true
		}
		switch a.dsc.(type) {
		case *desc.FileDescriptor:
			if ti == internal.File_packageTag || ti == internal.File_optionsTag {
				return !swapped
			}
		case *desc.MessageDescriptor:
			if ti == internal.Message_optionsTag {
				return !swapped
			}
		case *desc.EnumDescriptor:
			if ti == internal.Enum_optionsTag {
				return !swapped
			}
		case *desc.ServiceDescriptor:
			if ti == internal.Service_optionsTag {
				return !swapped
			}
		}
		return swapped

	} else if si == nil || sj == nil {
		// let stable sort keep unknown elements in same relative order
		return false
	}

	// both have source info: compare spans lexicographically; a shorter
	// span that is a prefix of the other sorts first
	for idx := 0; idx < len(sj.Span); idx++ {
		if idx >= len(si.Span) {
			return true
		}
		if si.Span[idx] < sj.Span[idx] {
			return true
		}
		if si.Span[idx] > sj.Span[idx] {
			return false
		}
	}
	return false
}
+
+type optionsByName struct {
+	addrs []elementAddr
+	opts  map[int32][]option
+}
+
+func (o optionsByName) Len() int {
+	return len(o.addrs)
+}
+
+func (o optionsByName) Less(i, j int) bool {
+	oi := o.opts[int32(o.addrs[i].elementIndex)]
+	oj := o.opts[int32(o.addrs[j].elementIndex)]
+	return optionLess(oi, oj)
+}
+
+func optionLess(i, j []option) bool {
+	ni := i[0].name
+	nj := j[0].name
+	if ni[0] != '(' && nj[0] == '(' {
+		return true
+	} else if ni[0] == '(' && nj[0] != '(' {
+		return false
+	}
+	return ni < nj
+}
+
+func (o optionsByName) Swap(i, j int) {
+	o.addrs[i], o.addrs[j] = o.addrs[j], o.addrs[i]
+}
+
+func (p *Printer) printElement(isDecriptor bool, si *descriptor.SourceCodeInfo_Location, w *writer, indent int, el func(*writer)) {
+	includeComments := isDecriptor || p.includeCommentType(CommentsTokens)
+
+	if includeComments && si != nil {
+		p.printLeadingComments(si, w, indent)
+	}
+	el(w)
+	if includeComments && si != nil {
+		p.printTrailingComments(si, w, indent)
+	}
+	if indent >= 0 && !w.newline {
+		// if we're not printing inline but element did not have trailing newline, add one now
+		fmt.Fprintln(w)
+	}
+}
+
+func (p *Printer) printElementString(si *descriptor.SourceCodeInfo_Location, w *writer, indent int, str string) {
+	p.printElement(false, si, w, inline(indent), func(w *writer) {
+		fmt.Fprintf(w, "%s ", str)
+	})
+}
+
+func (p *Printer) includeCommentType(c CommentType) bool {
+	return (p.OmitComments & c) == 0
+}
+
// printLeadingComments prints the detached and leading comments attached to
// si, honoring the printer's comment-type settings. A negative indent means
// the surrounding element is being inlined, so comments are separated from
// what follows by a space instead of a newline. It returns true if the last
// thing printed ended in a newline (false also when nothing was printed).
func (p *Printer) printLeadingComments(si *descriptor.SourceCodeInfo_Location, w *writer, indent int) bool {
	endsInNewLine := false

	if p.includeCommentType(CommentsDetached) {
		for _, c := range si.GetLeadingDetachedComments() {
			if p.printComment(c, w, indent, true) {
				// if comment ended in newline, add another newline to separate
				// this comment from the next
				p.newLine(w)
				endsInNewLine = true
			} else if indent < 0 {
				// comment did not end in newline and we are trying to inline?
				// just add a space to separate this comment from what follows
				fmt.Fprint(w, " ")
				endsInNewLine = false
			} else {
				// comment did not end in newline and we are *not* trying to inline?
				// add newline to end of comment and add another to separate this
				// comment from what follows
				fmt.Fprintln(w) // needed to end comment, regardless of p.Compact
				p.newLine(w)
				endsInNewLine = true
			}
		}
	}

	if p.includeCommentType(CommentsLeading) && si.GetLeadingComments() != "" {
		endsInNewLine = p.printComment(si.GetLeadingComments(), w, indent, true)
		if !endsInNewLine {
			if indent >= 0 {
				// leading comment didn't end with newline but needs one
				// (because we're *not* inlining)
				fmt.Fprintln(w) // needed to end comment, regardless of p.Compact
				endsInNewLine = true
			} else {
				// space between comment and following element when inlined
				fmt.Fprint(w, " ")
			}
		}
	}

	return endsInNewLine
}
+
+func (p *Printer) printTrailingComments(si *descriptor.SourceCodeInfo_Location, w *writer, indent int) {
+	if p.includeCommentType(CommentsTrailing) && si.GetTrailingComments() != "" {
+		if !p.printComment(si.GetTrailingComments(), w, indent, p.TrailingCommentsOnSeparateLine) && indent >= 0 {
+			// trailing comment didn't end with newline but needs one
+			// (because we're *not* inlining)
+			fmt.Fprintln(w) // needed to end comment, regardless of p.Compact
+		} else if indent < 0 {
+			fmt.Fprint(w, " ")
+		}
+	}
+}
+
// printComment renders comment text to w and returns true if the printed
// comment ended in a newline. A negative indent means the comment is being
// inlined, which prefers multi-line ("/* */") style; multi-line style is
// abandoned if the text itself contains "*/". forceNextLine requests that,
// when the previous output did not end its line, the comment start on a
// fresh line rather than being appended to the current one (single-line
// comments may otherwise be inlined at the end of the current line).
func (p *Printer) printComment(comments string, w *writer, indent int, forceNextLine bool) bool {
	if comments == "" {
		return false
	}

	var multiLine bool
	if indent < 0 {
		// use multi-line style when inlining
		multiLine = true
	} else {
		multiLine = p.PreferMultiLineStyleComments
	}
	if multiLine && strings.Contains(comments, "*/") {
		// can't emit '*/' in a multi-line style comment
		multiLine = false
	}

	lines := strings.Split(comments, "\n")

	// first, remove leading and trailing blank lines
	if lines[0] == "" {
		lines = lines[1:]
	}
	if lines[len(lines)-1] == "" {
		lines = lines[:len(lines)-1]
	}
	if len(lines) == 0 {
		return false
	}

	if indent >= 0 && !w.newline {
		// last element did not have trailing newline, so we
		// either need to tack on newline or, if comment is
		// just one line, inline it on the end
		if forceNextLine || len(lines) > 1 {
			fmt.Fprintln(w)
		} else {
			if !w.space {
				fmt.Fprint(w, " ")
			}
			// treat the rest of this comment as inlined
			indent = inline(indent)
		}
	}

	// single-line comment in multi-line style: emit as one "/* ... */"
	if len(lines) == 1 && multiLine {
		p.indent(w, indent)
		line := lines[0]
		if line[0] == ' ' && line[len(line)-1] != ' ' {
			// add trailing space for symmetry
			line += " "
		}
		fmt.Fprintf(w, "/*%s*/", line)
		if indent >= 0 {
			fmt.Fprintln(w)
			return true
		}
		return false
	}

	if multiLine {
		// multi-line style comments that actually span multiple lines
		// get a blank line before and after so that comment renders nicely
		lines = append(lines, "", "")
		copy(lines[1:], lines)
		lines[0] = ""
	}

	for i, l := range lines {
		// only indent continuation lines when inlining (indent < 0)
		p.maybeIndent(w, indent, i > 0)
		if multiLine {
			if i == 0 {
				// first line
				fmt.Fprintf(w, "/*%s\n", strings.TrimRight(l, " \t"))
			} else if i == len(lines)-1 {
				// last line
				if l == "" {
					fmt.Fprint(w, " */")
				} else {
					fmt.Fprintf(w, " *%s*/", l)
				}
				if indent >= 0 {
					fmt.Fprintln(w)
				}
			} else {
				fmt.Fprintf(w, " *%s\n", strings.TrimRight(l, " \t"))
			}
		} else {
			fmt.Fprintf(w, "//%s\n", strings.TrimRight(l, " \t"))
		}
	}

	// single-line comments always end in newline; multi-line comments only
	// end in newline for non-negative (e.g. non-inlined) indentation
	return !multiLine || indent >= 0
}
+
+func (p *Printer) indent(w io.Writer, indent int) {
+	for i := 0; i < indent; i++ {
+		fmt.Fprint(w, p.Indent)
+	}
+}
+
+func (p *Printer) maybeIndent(w io.Writer, indent int, requireIndent bool) {
+	if indent < 0 && requireIndent {
+		p.indent(w, -indent)
+	} else {
+		p.indent(w, indent)
+	}
+}
+
+type writer struct {
+	io.Writer
+	err     error
+	space   bool
+	newline bool
+}
+
+func newWriter(w io.Writer) *writer {
+	return &writer{Writer: w, newline: true}
+}
+
+func (w *writer) Write(p []byte) (int, error) {
+	if len(p) == 0 {
+		return 0, nil
+	}
+
+	w.newline = false
+
+	if w.space {
+		// skip any trailing space if the following
+		// character is semicolon, comma, or close bracket
+		if p[0] != ';' && p[0] != ',' && p[0] != ']' {
+			_, err := w.Writer.Write([]byte{' '})
+			if err != nil {
+				w.err = err
+				return 0, err
+			}
+		}
+		w.space = false
+	}
+
+	if p[len(p)-1] == ' ' {
+		w.space = true
+		p = p[:len(p)-1]
+	}
+	if len(p) > 0 && p[len(p)-1] == '\n' {
+		w.newline = true
+	}
+
+	num, err := w.Writer.Write(p)
+	if err != nil {
+		w.err = err
+	} else if w.space {
+		// pretend space was written
+		num++
+	}
+	return num, err
+}