Import of https://github.com/ciena/voltctl at commit 40d61fbf3f910ed4017cf67c9c79e8e1f82a33a5

Change-Id: I8464c59e60d76cb8612891db3303878975b5416c
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/.gitignore b/vendor/github.com/jhump/protoreflect/desc/protoparse/.gitignore
new file mode 100644
index 0000000..2652053
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/.gitignore
@@ -0,0 +1 @@
+y.output
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast.go
new file mode 100644
index 0000000..2499917
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast.go
@@ -0,0 +1,1081 @@
+package protoparse
+
+import "fmt"
+
+// This file defines all of the nodes in the proto AST.
+
+// ErrorWithSourcePos is an error about a proto source file that includes
+// information about the location in the file that caused the error.
+type ErrorWithSourcePos struct {
+	Underlying error
+	Pos        *SourcePos
+}
+
+// Error implements the error interface
+func (e ErrorWithSourcePos) Error() string {
+	if e.Pos.Line <= 0 || e.Pos.Col <= 0 {
+		return fmt.Sprintf("%s: %v", e.Pos.Filename, e.Underlying)
+	}
+	return fmt.Sprintf("%s:%d:%d: %v", e.Pos.Filename, e.Pos.Line, e.Pos.Col, e.Underlying)
+}
+
+// SourcePos identifies a location in a proto source file.
+type SourcePos struct {
+	Filename  string
+	Line, Col int
+	Offset    int
+}
+
+func unknownPos(filename string) *SourcePos {
+	return &SourcePos{Filename: filename}
+}
+
+type node interface {
+	start() *SourcePos
+	end() *SourcePos
+	leadingComments() []*comment
+	trailingComments() []*comment
+}
+
+type terminalNode interface {
+	node
+	popLeadingComment() *comment
+	pushTrailingComment(*comment)
+}
+
+var _ terminalNode = (*basicNode)(nil)
+var _ terminalNode = (*stringLiteralNode)(nil)
+var _ terminalNode = (*intLiteralNode)(nil)
+var _ terminalNode = (*floatLiteralNode)(nil)
+var _ terminalNode = (*identNode)(nil)
+
+type fileDecl interface {
+	node
+	getSyntax() node
+}
+
+var _ fileDecl = (*fileNode)(nil)
+var _ fileDecl = (*noSourceNode)(nil)
+
+type optionDecl interface {
+	node
+	getName() node
+	getValue() valueNode
+}
+
+var _ optionDecl = (*optionNode)(nil)
+var _ optionDecl = (*noSourceNode)(nil)
+
+type fieldDecl interface {
+	node
+	fieldLabel() node
+	fieldName() node
+	fieldType() node
+	fieldTag() node
+	fieldExtendee() node
+	getGroupKeyword() node
+}
+
+var _ fieldDecl = (*fieldNode)(nil)
+var _ fieldDecl = (*groupNode)(nil)
+var _ fieldDecl = (*mapFieldNode)(nil)
+var _ fieldDecl = (*syntheticMapField)(nil)
+var _ fieldDecl = (*noSourceNode)(nil)
+
+type rangeDecl interface {
+	node
+	rangeStart() node
+	rangeEnd() node
+}
+
+var _ rangeDecl = (*rangeNode)(nil)
+var _ rangeDecl = (*noSourceNode)(nil)
+
+type enumValueDecl interface {
+	node
+	getName() node
+	getNumber() node
+}
+
+var _ enumValueDecl = (*enumValueNode)(nil)
+var _ enumValueDecl = (*noSourceNode)(nil)
+
+type msgDecl interface {
+	node
+	messageName() node
+	reservedNames() []*stringLiteralNode
+}
+
+var _ msgDecl = (*messageNode)(nil)
+var _ msgDecl = (*groupNode)(nil)
+var _ msgDecl = (*mapFieldNode)(nil)
+var _ msgDecl = (*noSourceNode)(nil)
+
+type methodDecl interface {
+	node
+	getInputType() node
+	getOutputType() node
+}
+
+var _ methodDecl = (*methodNode)(nil)
+var _ methodDecl = (*noSourceNode)(nil)
+
+type posRange struct {
+	start, end *SourcePos
+}
+
+type basicNode struct {
+	posRange
+	leading  []*comment
+	trailing []*comment
+}
+
+func (n *basicNode) start() *SourcePos {
+	return n.posRange.start
+}
+
+func (n *basicNode) end() *SourcePos {
+	return n.posRange.end
+}
+
+func (n *basicNode) leadingComments() []*comment {
+	return n.leading
+}
+
+func (n *basicNode) trailingComments() []*comment {
+	return n.trailing
+}
+
+func (n *basicNode) popLeadingComment() *comment {
+	c := n.leading[0]
+	n.leading = n.leading[1:]
+	return c
+}
+
+func (n *basicNode) pushTrailingComment(c *comment) {
+	n.trailing = append(n.trailing, c)
+}
+
+type comment struct {
+	posRange
+	text string
+}
+
+type basicCompositeNode struct {
+	first node
+	last  node
+}
+
+func (n *basicCompositeNode) start() *SourcePos {
+	return n.first.start()
+}
+
+func (n *basicCompositeNode) end() *SourcePos {
+	return n.last.end()
+}
+
+func (n *basicCompositeNode) leadingComments() []*comment {
+	return n.first.leadingComments()
+}
+
+func (n *basicCompositeNode) trailingComments() []*comment {
+	return n.last.trailingComments()
+}
+
+func (n *basicCompositeNode) setRange(first, last node) {
+	n.first = first
+	n.last = last
+}
+
+type fileNode struct {
+	basicCompositeNode
+	syntax *syntaxNode
+	decls  []*fileElement
+
+	// These fields are populated after parsing, to make it easier to find them
+	// without searching decls. The parse result has a map of descriptors to
+	// nodes which makes the other declarations easily discoverable. But these
+	// elements do not map to descriptors -- they are just stored as strings in
+	// the file descriptor.
+	imports []*importNode
+	pkg     *packageNode
+}
+
+func (n *fileNode) getSyntax() node {
+	return n.syntax
+}
+
+type fileElement struct {
+	// a discriminated union: only one field will be set
+	imp     *importNode
+	pkg     *packageNode
+	option  *optionNode
+	message *messageNode
+	enum    *enumNode
+	extend  *extendNode
+	service *serviceNode
+	empty   *basicNode
+}
+
+func (n *fileElement) start() *SourcePos {
+	return n.get().start()
+}
+
+func (n *fileElement) end() *SourcePos {
+	return n.get().end()
+}
+
+func (n *fileElement) leadingComments() []*comment {
+	return n.get().leadingComments()
+}
+
+func (n *fileElement) trailingComments() []*comment {
+	return n.get().trailingComments()
+}
+
+func (n *fileElement) get() node {
+	switch {
+	case n.imp != nil:
+		return n.imp
+	case n.pkg != nil:
+		return n.pkg
+	case n.option != nil:
+		return n.option
+	case n.message != nil:
+		return n.message
+	case n.enum != nil:
+		return n.enum
+	case n.extend != nil:
+		return n.extend
+	case n.service != nil:
+		return n.service
+	default:
+		return n.empty
+	}
+}
+
+type syntaxNode struct {
+	basicCompositeNode
+	syntax *stringLiteralNode
+}
+
+type importNode struct {
+	basicCompositeNode
+	name   *stringLiteralNode
+	public bool
+	weak   bool
+}
+
+type packageNode struct {
+	basicCompositeNode
+	name *identNode
+}
+
+type identifier string
+
+type identKind int
+
+const (
+	identSimpleName identKind = iota
+	identQualified
+	identTypeName
+)
+
+type identNode struct {
+	basicNode
+	val  string
+	kind identKind
+}
+
+func (n *identNode) value() interface{} {
+	return identifier(n.val)
+}
+
+type optionNode struct {
+	basicCompositeNode
+	name *optionNameNode
+	val  valueNode
+}
+
+func (n *optionNode) getName() node {
+	return n.name
+}
+
+func (n *optionNode) getValue() valueNode {
+	return n.val
+}
+
+type optionNameNode struct {
+	basicCompositeNode
+	parts []*optionNamePartNode
+}
+
+type optionNamePartNode struct {
+	basicCompositeNode
+	text        *identNode
+	offset      int
+	length      int
+	isExtension bool
+	st, en      *SourcePos
+}
+
+func (n *optionNamePartNode) start() *SourcePos {
+	if n.isExtension {
+		return n.basicCompositeNode.start()
+	}
+	return n.st
+}
+
+func (n *optionNamePartNode) end() *SourcePos {
+	if n.isExtension {
+		return n.basicCompositeNode.end()
+	}
+	return n.en
+}
+
+func (n *optionNamePartNode) setRange(first, last node) {
+	n.basicCompositeNode.setRange(first, last)
+	if !n.isExtension {
+		st := *first.start()
+		st.Col += n.offset
+		n.st = &st
+		en := st
+		en.Col += n.length
+		n.en = &en
+	}
+}
+
+type valueNode interface {
+	node
+	value() interface{}
+}
+
+var _ valueNode = (*stringLiteralNode)(nil)
+var _ valueNode = (*intLiteralNode)(nil)
+var _ valueNode = (*negativeIntLiteralNode)(nil)
+var _ valueNode = (*floatLiteralNode)(nil)
+var _ valueNode = (*boolLiteralNode)(nil)
+var _ valueNode = (*sliceLiteralNode)(nil)
+var _ valueNode = (*aggregateLiteralNode)(nil)
+var _ valueNode = (*noSourceNode)(nil)
+
+type stringLiteralNode struct {
+	basicCompositeNode
+	val string
+}
+
+func (n *stringLiteralNode) value() interface{} {
+	return n.val
+}
+
+func (n *stringLiteralNode) popLeadingComment() *comment {
+	return n.first.(terminalNode).popLeadingComment()
+}
+
+func (n *stringLiteralNode) pushTrailingComment(c *comment) {
+	n.last.(terminalNode).pushTrailingComment(c)
+}
+
+type intLiteralNode struct {
+	basicNode
+	val uint64
+}
+
+func (n *intLiteralNode) value() interface{} {
+	return n.val
+}
+
+type negativeIntLiteralNode struct {
+	basicCompositeNode
+	val int64
+}
+
+func (n *negativeIntLiteralNode) value() interface{} {
+	return n.val
+}
+
+type floatLiteralNode struct {
+	basicCompositeNode
+	val float64
+}
+
+func (n *floatLiteralNode) value() interface{} {
+	return n.val
+}
+
+func (n *floatLiteralNode) popLeadingComment() *comment {
+	return n.first.(terminalNode).popLeadingComment()
+}
+
+func (n *floatLiteralNode) pushTrailingComment(c *comment) {
+	n.last.(terminalNode).pushTrailingComment(c)
+}
+
+type boolLiteralNode struct {
+	basicNode
+	val bool
+}
+
+func (n *boolLiteralNode) value() interface{} {
+	return n.val
+}
+
+type sliceLiteralNode struct {
+	basicCompositeNode
+	elements []valueNode
+}
+
+func (n *sliceLiteralNode) value() interface{} {
+	return n.elements
+}
+
+type aggregateLiteralNode struct {
+	basicCompositeNode
+	elements []*aggregateEntryNode
+}
+
+func (n *aggregateLiteralNode) value() interface{} {
+	return n.elements
+}
+
+type aggregateEntryNode struct {
+	basicCompositeNode
+	name *aggregateNameNode
+	val  valueNode
+}
+
+type aggregateNameNode struct {
+	basicCompositeNode
+	name        *identNode
+	isExtension bool
+}
+
+func (a *aggregateNameNode) value() string {
+	if a.isExtension {
+		return "[" + a.name.val + "]"
+	} else {
+		return a.name.val
+	}
+}
+
+type fieldNode struct {
+	basicCompositeNode
+	label   *labelNode
+	fldType *identNode
+	name    *identNode
+	tag     *intLiteralNode
+	options []*optionNode
+
+	// This field is populated after parsing, to allow lookup of extendee source
+	// locations when field extendees cannot be linked. (Otherwise, this is just
+	// stored as a string in the field descriptors defined inside the extend
+	// block).
+	extendee *extendNode
+}
+
+func (n *fieldNode) fieldLabel() node {
+	// proto3 fields and fields inside one-ofs will not have a label and we need
+	// this check in order to return a nil node -- otherwise we'd return a
+	// non-nil node that has a nil pointer value in it :/
+	if n.label == nil {
+		return nil
+	}
+	return n.label
+}
+
+func (n *fieldNode) fieldName() node {
+	return n.name
+}
+
+func (n *fieldNode) fieldType() node {
+	return n.fldType
+}
+
+func (n *fieldNode) fieldTag() node {
+	return n.tag
+}
+
+func (n *fieldNode) fieldExtendee() node {
+	if n.extendee != nil {
+		return n.extendee.extendee
+	}
+	return nil
+}
+
+func (n *fieldNode) getGroupKeyword() node {
+	return nil
+}
+
+type labelNode struct {
+	basicNode
+	repeated bool
+	required bool
+}
+
+type groupNode struct {
+	basicCompositeNode
+	groupKeyword *identNode
+	label        *labelNode
+	name         *identNode
+	tag          *intLiteralNode
+	decls        []*messageElement
+
+	// This field is populated after parsing, to make these elements easier
+	// to find without searching decls. The parse result has a map of descriptors to
+	// nodes which makes the other declarations easily discoverable. But these
+	// elements do not map to descriptors -- they are just stored as strings in
+	// the message descriptor.
+	reserved []*stringLiteralNode
+	// This field is populated after parsing, to allow lookup of extendee source
+	// locations when field extendees cannot be linked. (Otherwise, this is just
+	// stored as a string in the field descriptors defined inside the extend
+	// block).
+	extendee *extendNode
+}
+
+func (n *groupNode) fieldLabel() node {
+	return n.label
+}
+
+func (n *groupNode) fieldName() node {
+	return n.name
+}
+
+func (n *groupNode) fieldType() node {
+	return n.name
+}
+
+func (n *groupNode) fieldTag() node {
+	return n.tag
+}
+
+func (n *groupNode) fieldExtendee() node {
+	if n.extendee != nil {
+		return n.extendee.extendee
+	}
+	return nil
+}
+
+func (n *groupNode) getGroupKeyword() node {
+	return n.groupKeyword
+}
+
+func (n *groupNode) messageName() node {
+	return n.name
+}
+
+func (n *groupNode) reservedNames() []*stringLiteralNode {
+	return n.reserved
+}
+
+type oneOfNode struct {
+	basicCompositeNode
+	name  *identNode
+	decls []*oneOfElement
+}
+
+type oneOfElement struct {
+	// a discriminated union: only one field will be set
+	option *optionNode
+	field  *fieldNode
+	empty  *basicNode
+}
+
+func (n *oneOfElement) start() *SourcePos {
+	return n.get().start()
+}
+
+func (n *oneOfElement) end() *SourcePos {
+	return n.get().end()
+}
+
+func (n *oneOfElement) leadingComments() []*comment {
+	return n.get().leadingComments()
+}
+
+func (n *oneOfElement) trailingComments() []*comment {
+	return n.get().trailingComments()
+}
+
+func (n *oneOfElement) get() node {
+	switch {
+	case n.option != nil:
+		return n.option
+	case n.field != nil:
+		return n.field
+	default:
+		return n.empty
+	}
+}
+
+type mapFieldNode struct {
+	basicCompositeNode
+	mapKeyword *identNode
+	keyType    *identNode
+	valueType  *identNode
+	name       *identNode
+	tag        *intLiteralNode
+	options    []*optionNode
+}
+
+func (n *mapFieldNode) fieldLabel() node {
+	return n.mapKeyword
+}
+
+func (n *mapFieldNode) fieldName() node {
+	return n.name
+}
+
+func (n *mapFieldNode) fieldType() node {
+	return n.mapKeyword
+}
+
+func (n *mapFieldNode) fieldTag() node {
+	return n.tag
+}
+
+func (n *mapFieldNode) fieldExtendee() node {
+	return nil
+}
+
+func (n *mapFieldNode) getGroupKeyword() node {
+	return nil
+}
+
+func (n *mapFieldNode) messageName() node {
+	return n.name
+}
+
+func (n *mapFieldNode) reservedNames() []*stringLiteralNode {
+	return nil
+}
+
+func (n *mapFieldNode) keyField() *syntheticMapField {
+	tag := &intLiteralNode{
+		basicNode: basicNode{
+			posRange: posRange{start: n.keyType.start(), end: n.keyType.end()},
+		},
+		val: 1,
+	}
+	return &syntheticMapField{ident: n.keyType, tag: tag}
+}
+
+func (n *mapFieldNode) valueField() *syntheticMapField {
+	tag := &intLiteralNode{
+		basicNode: basicNode{
+			posRange: posRange{start: n.valueType.start(), end: n.valueType.end()},
+		},
+		val: 2,
+	}
+	return &syntheticMapField{ident: n.valueType, tag: tag}
+}
+
+type syntheticMapField struct {
+	ident *identNode
+	tag   *intLiteralNode
+}
+
+func (n *syntheticMapField) start() *SourcePos {
+	return n.ident.start()
+}
+
+func (n *syntheticMapField) end() *SourcePos {
+	return n.ident.end()
+}
+
+func (n *syntheticMapField) leadingComments() []*comment {
+	return nil
+}
+
+func (n *syntheticMapField) trailingComments() []*comment {
+	return nil
+}
+
+func (n *syntheticMapField) fieldLabel() node {
+	return n.ident
+}
+
+func (n *syntheticMapField) fieldName() node {
+	return n.ident
+}
+
+func (n *syntheticMapField) fieldType() node {
+	return n.ident
+}
+
+func (n *syntheticMapField) fieldTag() node {
+	return n.tag
+}
+
+func (n *syntheticMapField) fieldExtendee() node {
+	return nil
+}
+
+func (n *syntheticMapField) getGroupKeyword() node {
+	return nil
+}
+
+type extensionRangeNode struct {
+	basicCompositeNode
+	ranges  []*rangeNode
+	options []*optionNode
+}
+
+type rangeNode struct {
+	basicCompositeNode
+	stNode, enNode node
+	st, en         int32
+}
+
+func (n *rangeNode) rangeStart() node {
+	return n.stNode
+}
+
+func (n *rangeNode) rangeEnd() node {
+	return n.enNode
+}
+
+type reservedNode struct {
+	basicCompositeNode
+	ranges []*rangeNode
+	names  []*stringLiteralNode
+}
+
+type enumNode struct {
+	basicCompositeNode
+	name  *identNode
+	decls []*enumElement
+
+	// This field is populated after parsing, to make these elements easier
+	// to find without searching decls. The parse result has a map of descriptors to
+	// nodes which makes the other declarations easily discoverable. But these
+	// elements do not map to descriptors -- they are just stored as strings in
+	// the message descriptor.
+	reserved []*stringLiteralNode
+}
+
+type enumElement struct {
+	// a discriminated union: only one field will be set
+	option   *optionNode
+	value    *enumValueNode
+	reserved *reservedNode
+	empty    *basicNode
+}
+
+func (n *enumElement) start() *SourcePos {
+	return n.get().start()
+}
+
+func (n *enumElement) end() *SourcePos {
+	return n.get().end()
+}
+
+func (n *enumElement) leadingComments() []*comment {
+	return n.get().leadingComments()
+}
+
+func (n *enumElement) trailingComments() []*comment {
+	return n.get().trailingComments()
+}
+
+func (n *enumElement) get() node {
+	switch {
+	case n.option != nil:
+		return n.option
+	case n.value != nil:
+		return n.value
+	default:
+		return n.empty
+	}
+}
+
+type enumValueNode struct {
+	basicCompositeNode
+	name    *identNode
+	options []*optionNode
+
+	// only one of these two will be set:
+
+	numberP *intLiteralNode         // positive numeric value
+	numberN *negativeIntLiteralNode // negative numeric value
+}
+
+func (n *enumValueNode) getName() node {
+	return n.name
+}
+
+func (n *enumValueNode) getNumber() node {
+	if n.numberP != nil {
+		return n.numberP
+	}
+	return n.numberN
+}
+
+type messageNode struct {
+	basicCompositeNode
+	name  *identNode
+	decls []*messageElement
+
+	// This field is populated after parsing, to make these elements easier
+	// to find without searching decls. The parse result has a map of descriptors to
+	// nodes which makes the other declarations easily discoverable. But these
+	// elements do not map to descriptors -- they are just stored as strings in
+	// the message descriptor.
+	reserved []*stringLiteralNode
+}
+
+func (n *messageNode) messageName() node {
+	return n.name
+}
+
+func (n *messageNode) reservedNames() []*stringLiteralNode {
+	return n.reserved
+}
+
+type messageElement struct {
+	// a discriminated union: only one field will be set
+	option         *optionNode
+	field          *fieldNode
+	mapField       *mapFieldNode
+	oneOf          *oneOfNode
+	group          *groupNode
+	nested         *messageNode
+	enum           *enumNode
+	extend         *extendNode
+	extensionRange *extensionRangeNode
+	reserved       *reservedNode
+	empty          *basicNode
+}
+
+func (n *messageElement) start() *SourcePos {
+	return n.get().start()
+}
+
+func (n *messageElement) end() *SourcePos {
+	return n.get().end()
+}
+
+func (n *messageElement) leadingComments() []*comment {
+	return n.get().leadingComments()
+}
+
+func (n *messageElement) trailingComments() []*comment {
+	return n.get().trailingComments()
+}
+
+func (n *messageElement) get() node {
+	switch {
+	case n.option != nil:
+		return n.option
+	case n.field != nil:
+		return n.field
+	case n.mapField != nil:
+		return n.mapField
+	case n.oneOf != nil:
+		return n.oneOf
+	case n.group != nil:
+		return n.group
+	case n.nested != nil:
+		return n.nested
+	case n.enum != nil:
+		return n.enum
+	case n.extend != nil:
+		return n.extend
+	case n.extensionRange != nil:
+		return n.extensionRange
+	case n.reserved != nil:
+		return n.reserved
+	default:
+		return n.empty
+	}
+}
+
+type extendNode struct {
+	basicCompositeNode
+	extendee *identNode
+	decls    []*extendElement
+}
+
+type extendElement struct {
+	// a discriminated union: only one field will be set
+	field *fieldNode
+	group *groupNode
+	empty *basicNode
+}
+
+func (n *extendElement) start() *SourcePos {
+	return n.get().start()
+}
+
+func (n *extendElement) end() *SourcePos {
+	return n.get().end()
+}
+
+func (n *extendElement) leadingComments() []*comment {
+	return n.get().leadingComments()
+}
+
+func (n *extendElement) trailingComments() []*comment {
+	return n.get().trailingComments()
+}
+
+func (n *extendElement) get() node {
+	switch {
+	case n.field != nil:
+		return n.field
+	case n.group != nil:
+		return n.group
+	default:
+		return n.empty
+	}
+}
+
+type serviceNode struct {
+	basicCompositeNode
+	name  *identNode
+	decls []*serviceElement
+}
+
+type serviceElement struct {
+	// a discriminated union: only one field will be set
+	option *optionNode
+	rpc    *methodNode
+	empty  *basicNode
+}
+
+func (n *serviceElement) start() *SourcePos {
+	return n.get().start()
+}
+
+func (n *serviceElement) end() *SourcePos {
+	return n.get().end()
+}
+
+func (n *serviceElement) leadingComments() []*comment {
+	return n.get().leadingComments()
+}
+
+func (n *serviceElement) trailingComments() []*comment {
+	return n.get().trailingComments()
+}
+
+func (n *serviceElement) get() node {
+	switch {
+	case n.option != nil:
+		return n.option
+	case n.rpc != nil:
+		return n.rpc
+	default:
+		return n.empty
+	}
+}
+
+type methodNode struct {
+	basicCompositeNode
+	name    *identNode
+	input   *rpcTypeNode
+	output  *rpcTypeNode
+	options []*optionNode
+}
+
+func (n *methodNode) getInputType() node {
+	return n.input.msgType
+}
+
+func (n *methodNode) getOutputType() node {
+	return n.output.msgType
+}
+
+type rpcTypeNode struct {
+	basicCompositeNode
+	msgType       *identNode
+	streamKeyword node
+}
+
+type noSourceNode struct {
+	pos *SourcePos
+}
+
+func (n noSourceNode) start() *SourcePos {
+	return n.pos
+}
+
+func (n noSourceNode) end() *SourcePos {
+	return n.pos
+}
+
+func (n noSourceNode) leadingComments() []*comment {
+	return nil
+}
+
+func (n noSourceNode) trailingComments() []*comment {
+	return nil
+}
+
+func (n noSourceNode) getSyntax() node {
+	return n
+}
+
+func (n noSourceNode) getName() node {
+	return n
+}
+
+func (n noSourceNode) getValue() valueNode {
+	return n
+}
+
+func (n noSourceNode) fieldLabel() node {
+	return n
+}
+
+func (n noSourceNode) fieldName() node {
+	return n
+}
+
+func (n noSourceNode) fieldType() node {
+	return n
+}
+
+func (n noSourceNode) fieldTag() node {
+	return n
+}
+
+func (n noSourceNode) fieldExtendee() node {
+	return n
+}
+
+func (n noSourceNode) getGroupKeyword() node {
+	return n
+}
+
+func (n noSourceNode) rangeStart() node {
+	return n
+}
+
+func (n noSourceNode) rangeEnd() node {
+	return n
+}
+
+func (n noSourceNode) getNumber() node {
+	return n
+}
+
+func (n noSourceNode) messageName() node {
+	return n
+}
+
+func (n noSourceNode) reservedNames() []*stringLiteralNode {
+	return nil
+}
+
+func (n noSourceNode) getInputType() node {
+	return n
+}
+
+func (n noSourceNode) getOutputType() node {
+	return n
+}
+
+func (n noSourceNode) value() interface{} {
+	return nil
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/doc.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/doc.go
new file mode 100644
index 0000000..c6446d3
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/doc.go
@@ -0,0 +1,10 @@
+// Package protoparse provides functionality for parsing *.proto source files
+// into descriptors that can be used with other protoreflect packages, like
+// dynamic messages and dynamic GRPC clients.
+//
+// This package links in other packages that include compiled descriptors for
+// the various "google/protobuf/*.proto" files that are included with protoc.
+// That way, like when invoking protoc, programs need not supply copies of these
+// "builtin" files. Though if copies of the files are provided, they will be
+// used instead of the builtin descriptors.
+package protoparse
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/lexer.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/lexer.go
new file mode 100644
index 0000000..c685e56
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/lexer.go
@@ -0,0 +1,766 @@
+package protoparse
+
+import (
+	"bufio"
+	"bytes"
+	"errors"
+	"fmt"
+	"io"
+	"strconv"
+	"strings"
+	"unicode/utf8"
+)
+
+type runeReader struct {
+	rr     *bufio.Reader
+	unread []rune
+	err    error
+}
+
+func (rr *runeReader) readRune() (r rune, size int, err error) {
+	if rr.err != nil {
+		return 0, 0, rr.err
+	}
+	if len(rr.unread) > 0 {
+		r := rr.unread[len(rr.unread)-1]
+		rr.unread = rr.unread[:len(rr.unread)-1]
+		return r, utf8.RuneLen(r), nil
+	}
+	r, sz, err := rr.rr.ReadRune()
+	if err != nil {
+		rr.err = err
+	}
+	return r, sz, err
+}
+
+func (rr *runeReader) unreadRune(r rune) {
+	rr.unread = append(rr.unread, r)
+}
+
+func lexError(l protoLexer, pos *SourcePos, err string) {
+	pl := l.(*protoLex)
+	if pl.err == nil {
+		pl.err = ErrorWithSourcePos{Underlying: errors.New(err), Pos: pos}
+	}
+}
+
+type protoLex struct {
+	filename string
+	input    *runeReader
+	err      error
+	res      *fileNode
+
+	lineNo int
+	colNo  int
+	offset int
+
+	prevSym terminalNode
+}
+
+func newLexer(in io.Reader) *protoLex {
+	return &protoLex{input: &runeReader{rr: bufio.NewReader(in)}}
+}
+
+var keywords = map[string]int{
+	"syntax":     _SYNTAX,
+	"import":     _IMPORT,
+	"weak":       _WEAK,
+	"public":     _PUBLIC,
+	"package":    _PACKAGE,
+	"option":     _OPTION,
+	"true":       _TRUE,
+	"false":      _FALSE,
+	"inf":        _INF,
+	"nan":        _NAN,
+	"repeated":   _REPEATED,
+	"optional":   _OPTIONAL,
+	"required":   _REQUIRED,
+	"double":     _DOUBLE,
+	"float":      _FLOAT,
+	"int32":      _INT32,
+	"int64":      _INT64,
+	"uint32":     _UINT32,
+	"uint64":     _UINT64,
+	"sint32":     _SINT32,
+	"sint64":     _SINT64,
+	"fixed32":    _FIXED32,
+	"fixed64":    _FIXED64,
+	"sfixed32":   _SFIXED32,
+	"sfixed64":   _SFIXED64,
+	"bool":       _BOOL,
+	"string":     _STRING,
+	"bytes":      _BYTES,
+	"group":      _GROUP,
+	"oneof":      _ONEOF,
+	"map":        _MAP,
+	"extensions": _EXTENSIONS,
+	"to":         _TO,
+	"max":        _MAX,
+	"reserved":   _RESERVED,
+	"enum":       _ENUM,
+	"message":    _MESSAGE,
+	"extend":     _EXTEND,
+	"service":    _SERVICE,
+	"rpc":        _RPC,
+	"stream":     _STREAM,
+	"returns":    _RETURNS,
+}
+
+func (l *protoLex) cur() *SourcePos {
+	return &SourcePos{
+		Filename: l.filename,
+		Offset:   l.offset,
+		Line:     l.lineNo + 1,
+		Col:      l.colNo + 1,
+	}
+}
+
+func (l *protoLex) prev() *SourcePos {
+	if l.prevSym == nil {
+		return &SourcePos{
+			Filename: l.filename,
+			Offset:   0,
+			Line:     1,
+			Col:      1,
+		}
+	}
+	return l.prevSym.start()
+}
+
+func (l *protoLex) Lex(lval *protoSymType) int {
+	if l.err != nil {
+		// if we are already in a failed state, bail
+		lval.err = l.err
+		return _ERROR
+	}
+
+	prevLineNo := l.lineNo
+	prevColNo := l.colNo
+	prevOffset := l.offset
+	var comments []*comment
+
+	pos := func() posRange {
+		return posRange{
+			start: &SourcePos{
+				Filename: l.filename,
+				Offset:   prevOffset,
+				Line:     prevLineNo + 1,
+				Col:      prevColNo + 1,
+			},
+			end: l.cur(),
+		}
+	}
+	basic := func() basicNode {
+		return basicNode{
+			posRange: pos(),
+			leading:  comments,
+		}
+	}
+	setPrev := func(n terminalNode) {
+		nStart := n.start().Line
+		if _, ok := n.(*basicNode); ok {
+			// if the node is a simple rune, don't attribute comments to it
+			// HACK: adjusting the start line makes leading comments appear
+		// detached so logic below will naturally associate the trailing
+			// comment to previous symbol
+			nStart += 2
+		}
+		if l.prevSym != nil && len(n.leadingComments()) > 0 && l.prevSym.end().Line < nStart {
+			// we may need to re-attribute the first comment to
+			// instead be previous node's trailing comment
+			prevEnd := l.prevSym.end().Line
+			comments := n.leadingComments()
+			c := comments[0]
+			commentStart := c.start.Line
+			if commentStart == prevEnd {
+				// comment is on same line as previous symbol
+				n.popLeadingComment()
+				l.prevSym.pushTrailingComment(c)
+			} else if commentStart == prevEnd+1 {
+				// comment is right after previous symbol; see if it is detached
+				// and if so re-attribute
+				singleLineStyle := strings.HasPrefix(c.text, "//")
+				line := c.end.Line
+				groupEnd := -1
+				for i := 1; i < len(comments); i++ {
+					c := comments[i]
+					newGroup := false
+					if !singleLineStyle || c.start.Line > line+1 {
+						// we've found a gap between comments, which means the
+						// previous comments were detached
+						newGroup = true
+					} else {
+						line = c.end.Line
+						singleLineStyle = strings.HasPrefix(comments[i].text, "//")
+						if !singleLineStyle {
+							// we've found a switch from // comments to /*
+							// consider that a new group which means the
+							// previous comments were detached
+							newGroup = true
+						}
+					}
+					if newGroup {
+						groupEnd = i
+						break
+					}
+				}
+
+				if groupEnd == -1 {
+					// just one group of comments; we'll mark it as a trailing
+					// comment if it immediately follows previous symbol and is
+					// detached from current symbol
+					c1 := comments[0]
+					c2 := comments[len(comments)-1]
+					if c1.start.Line <= prevEnd+1 && c2.end.Line < nStart-1 {
+						groupEnd = len(comments)
+					}
+				}
+
+				for i := 0; i < groupEnd; i++ {
+					l.prevSym.pushTrailingComment(n.popLeadingComment())
+				}
+			}
+		}
+
+		l.prevSym = n
+	}
+	setString := func(val string) {
+		b := basic()
+		lval.str = &stringLiteralNode{val: val}
+		lval.str.setRange(&b, &b)
+		setPrev(lval.str)
+	}
+	setIdent := func(val string, kind identKind) {
+		lval.id = &identNode{basicNode: basic(), val: val, kind: kind}
+		setPrev(lval.id)
+	}
+	setInt := func(val uint64) {
+		lval.ui = &intLiteralNode{basicNode: basic(), val: val}
+		setPrev(lval.ui)
+	}
+	setFloat := func(val float64) {
+		b := basic()
+		lval.f = &floatLiteralNode{val: val}
+		lval.f.setRange(&b, &b)
+		setPrev(lval.f)
+	}
+	setRune := func() {
+		b := basic()
+		lval.b = &b
+		setPrev(lval.b)
+	}
+	setError := func(err error) {
+		lval.err = err
+		l.err = err
+	}
+
+	for {
+		c, n, err := l.input.readRune()
+		if err == io.EOF {
+			// we're not actually returning a rune, but this will associate
+			// accumulated comments as a trailing comment on last symbol
+			// (if appropriate)
+			setRune()
+			return 0
+		} else if err != nil {
+			setError(err)
+			return _ERROR
+		}
+
+		prevLineNo = l.lineNo
+		prevColNo = l.colNo
+		prevOffset = l.offset
+
+		l.offset += n
+		if c == '\n' {
+			l.colNo = 0
+			l.lineNo++
+			continue
+		} else if c == '\r' {
+			continue
+		}
+		l.colNo++
+		if c == ' ' || c == '\t' {
+			continue
+		}
+
+		if c == '.' {
+			// tokens that start with a dot include type names and decimal literals
+			cn, _, err := l.input.readRune()
+			if err != nil {
+				setRune()
+				return int(c)
+			}
+			if cn == '_' || (cn >= 'a' && cn <= 'z') || (cn >= 'A' && cn <= 'Z') {
+				l.colNo++
+				token := []rune{c, cn}
+				token = l.readIdentifier(token)
+				setIdent(string(token), identTypeName)
+				return _TYPENAME
+			}
+			if cn >= '0' && cn <= '9' {
+				l.colNo++
+				token := []rune{c, cn}
+				token = l.readNumber(token, false, true)
+				f, err := strconv.ParseFloat(string(token), 64)
+				if err != nil {
+					setError(err)
+					return _ERROR
+				}
+				setFloat(f)
+				return _FLOAT_LIT
+			}
+			l.input.unreadRune(cn)
+			setRune()
+			return int(c)
+		}
+
+		if c == '_' || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') {
+			// identifier
+			token := []rune{c}
+			token = l.readIdentifier(token)
+			str := string(token)
+			if strings.Contains(str, ".") {
+				setIdent(str, identQualified)
+				return _FQNAME
+			}
+			if t, ok := keywords[str]; ok {
+				setIdent(str, identSimpleName)
+				return t
+			}
+			setIdent(str, identSimpleName)
+			return _NAME
+		}
+
+		if c >= '0' && c <= '9' {
+			// integer or float literal
+			if c == '0' {
+				cn, _, err := l.input.readRune()
+				if err != nil {
+					setInt(0)
+					return _INT_LIT
+				}
+				if cn == 'x' || cn == 'X' {
+					cnn, _, err := l.input.readRune()
+					if err != nil {
+						l.input.unreadRune(cn)
+						setInt(0)
+						return _INT_LIT
+					}
+					if (cnn >= '0' && cnn <= '9') || (cnn >= 'a' && cnn <= 'f') || (cnn >= 'A' && cnn <= 'F') {
+						// hexadecimal!
+						l.colNo += 2
+						token := []rune{cnn}
+						token = l.readHexNumber(token)
+						ui, err := strconv.ParseUint(string(token), 16, 64)
+						if err != nil {
+							setError(err)
+							return _ERROR
+						}
+						setInt(ui)
+						return _INT_LIT
+					}
+					l.input.unreadRune(cnn)
+					l.input.unreadRune(cn)
+					setInt(0)
+					return _INT_LIT
+				} else {
+					l.input.unreadRune(cn)
+				}
+			}
+			token := []rune{c}
+			token = l.readNumber(token, true, true)
+			numstr := string(token)
+			if strings.Contains(numstr, ".") || strings.Contains(numstr, "e") || strings.Contains(numstr, "E") {
+				// floating point!
+				f, err := strconv.ParseFloat(numstr, 64)
+				if err != nil {
+					setError(err)
+					return _ERROR
+				}
+				setFloat(f)
+				return _FLOAT_LIT
+			}
+			// integer! (decimal or octal)
+			ui, err := strconv.ParseUint(numstr, 0, 64)
+			if err != nil {
+				setError(err)
+				return _ERROR
+			}
+			setInt(ui)
+			return _INT_LIT
+		}
+
+		if c == '\'' || c == '"' {
+			// string literal
+			str, err := l.readStringLiteral(c)
+			if err != nil {
+				setError(err)
+				return _ERROR
+			}
+			setString(str)
+			return _STRING_LIT
+		}
+
+		if c == '/' {
+			// comment
+			cn, _, err := l.input.readRune()
+			if err != nil {
+				setRune()
+				return int(c)
+			}
+			if cn == '/' {
+				l.colNo++
+				hitNewline, txt := l.skipToEndOfLineComment()
+				commentPos := pos()
+				commentPos.end.Col++
+				if hitNewline {
+					l.colNo = 0
+					l.lineNo++
+				}
+				comments = append(comments, &comment{posRange: commentPos, text: txt})
+				continue
+			}
+			if cn == '*' {
+				l.colNo++
+				if txt, ok := l.skipToEndOfBlockComment(); !ok {
+					setError(errors.New("block comment never terminates, unexpected EOF"))
+					return _ERROR
+				} else {
+					comments = append(comments, &comment{posRange: pos(), text: txt})
+				}
+				continue
+			}
+			l.input.unreadRune(cn)
+		}
+
+		setRune()
+		return int(c)
+	}
+}
+
+// readNumber consumes the rest of a numeric literal, given the rune(s)
+// already read in sofar. allowDot and allowExp indicate whether a decimal
+// point or exponent marker ('e'/'E') may still appear; each is accepted at
+// most once. Lookahead runes that turn out not to be part of the number are
+// unread so the lexer can process them as the next token. Column position is
+// advanced for every rune kept.
+func (l *protoLex) readNumber(sofar []rune, allowDot bool, allowExp bool) []rune {
+	token := sofar
+	for {
+		c, _, err := l.input.readRune()
+		if err != nil {
+			break
+		}
+		if c == '.' {
+			if !allowDot {
+				l.input.unreadRune(c)
+				break
+			}
+			allowDot = false
+			// a dot only belongs to the number if followed by a digit
+			cn, _, err := l.input.readRune()
+			if err != nil {
+				l.input.unreadRune(c)
+				break
+			}
+			if cn < '0' || cn > '9' {
+				l.input.unreadRune(cn)
+				l.input.unreadRune(c)
+				break
+			}
+			l.colNo++
+			token = append(token, c)
+			c = cn
+		} else if c == 'e' || c == 'E' {
+			if !allowExp {
+				l.input.unreadRune(c)
+				break
+			}
+			allowExp = false
+			// an exponent marker must be followed by an optional sign and a digit
+			cn, _, err := l.input.readRune()
+			if err != nil {
+				l.input.unreadRune(c)
+				break
+			}
+			if cn == '-' || cn == '+' {
+				cnn, _, err := l.input.readRune()
+				if err != nil {
+					l.input.unreadRune(cn)
+					l.input.unreadRune(c)
+					break
+				}
+				if cnn < '0' || cnn > '9' {
+					l.input.unreadRune(cnn)
+					l.input.unreadRune(cn)
+					l.input.unreadRune(c)
+					break
+				}
+				// keep the exponent marker; the sign and first digit are
+				// appended by the shared tail below
+				l.colNo++
+				token = append(token, c)
+				c = cn
+				cn = cnn
+			} else if cn < '0' || cn > '9' {
+				l.input.unreadRune(cn)
+				l.input.unreadRune(c)
+				break
+			}
+			l.colNo++
+			token = append(token, c)
+			c = cn
+		} else if c < '0' || c > '9' {
+			// not part of a number; put it back for the next token
+			l.input.unreadRune(c)
+			break
+		}
+		l.colNo++
+		token = append(token, c)
+	}
+	return token
+}
+
+// readHexNumber consumes the remaining hexadecimal digits of a hex literal
+// whose first digit has already been read into sofar. The first non-hex rune
+// encountered is unread so it can be lexed as the next token.
+func (l *protoLex) readHexNumber(sofar []rune) []rune {
+	token := sofar
+	for {
+		c, _, err := l.input.readRune()
+		if err != nil {
+			break
+		}
+		if (c < 'a' || c > 'f') && (c < 'A' || c > 'F') && (c < '0' || c > '9') {
+			l.input.unreadRune(c)
+			break
+		}
+		l.colNo++
+		token = append(token, c)
+	}
+	return token
+}
+
+// readIdentifier consumes the rest of an identifier (ASCII letters, digits,
+// and underscores), given the rune(s) already read in sofar. A dot is
+// included only when immediately followed by a letter or underscore, which
+// allows qualified names to be read as one token; otherwise the lookahead
+// runes are unread for the next token.
+func (l *protoLex) readIdentifier(sofar []rune) []rune {
+	token := sofar
+	for {
+		c, _, err := l.input.readRune()
+		if err != nil {
+			break
+		}
+		if c == '.' {
+			// a dot continues the identifier only if a name character follows
+			cn, _, err := l.input.readRune()
+			if err != nil {
+				l.input.unreadRune(c)
+				break
+			}
+			if cn != '_' && (cn < 'a' || cn > 'z') && (cn < 'A' || cn > 'Z') {
+				l.input.unreadRune(cn)
+				l.input.unreadRune(c)
+				break
+			}
+			l.colNo++
+			token = append(token, c)
+			c = cn
+		} else if c != '_' && (c < 'a' || c > 'z') && (c < 'A' || c > 'Z') && (c < '0' || c > '9') {
+			l.input.unreadRune(c)
+			break
+		}
+		l.colNo++
+		token = append(token, c)
+	}
+	return token
+}
+
+// readStringLiteral consumes a string literal, given that the opening quote
+// rune has already been read, and returns the unescaped contents. It handles
+// C-style escape sequences: \x hex (one or two digits), octal (up to three
+// digits, <= 0377), \u (exactly four hex digits), \U (exactly eight hex
+// digits, must be a valid code point), and the usual single-character
+// escapes. Unescaped newlines and NUL bytes inside the literal are errors,
+// as is EOF before the closing quote.
+func (l *protoLex) readStringLiteral(quote rune) (string, error) {
+	var buf bytes.Buffer
+	for {
+		c, _, err := l.input.readRune()
+		if err != nil {
+			if err == io.EOF {
+				err = io.ErrUnexpectedEOF
+			}
+			return "", err
+		}
+		if c == '\n' {
+			l.colNo = 0
+			l.lineNo++
+			return "", errors.New("encountered end-of-line before end of string literal")
+		}
+		l.colNo++
+		if c == quote {
+			break
+		}
+		if c == 0 {
+			return "", errors.New("null character ('\\0') not allowed in string literal")
+		}
+		if c == '\\' {
+			// escape sequence
+			c, _, err = l.input.readRune()
+			if err != nil {
+				return "", err
+			}
+			l.colNo++
+			if c == 'x' || c == 'X' {
+				// hex escape: one required digit plus an optional second
+				c, _, err := l.input.readRune()
+				if err != nil {
+					return "", err
+				}
+				l.colNo++
+				c2, _, err := l.input.readRune()
+				if err != nil {
+					return "", err
+				}
+				var hex string
+				if (c2 < '0' || c2 > '9') && (c2 < 'a' || c2 > 'f') && (c2 < 'A' || c2 > 'F') {
+					// second char is not a hex digit; only one digit in the escape
+					l.input.unreadRune(c2)
+					hex = string(c)
+				} else {
+					l.colNo++
+					hex = string([]rune{c, c2})
+				}
+				i, err := strconv.ParseInt(hex, 16, 32)
+				if err != nil {
+					// NOTE(review): %q quotes the digits, so the message reads
+					// like \x"ff" — confirm against upstream before changing
+					return "", fmt.Errorf("invalid hex escape: \\x%q", hex)
+				}
+				buf.WriteByte(byte(i))
+
+			} else if c >= '0' && c <= '7' {
+				// octal escape: up to three octal digits, value must fit a byte
+				c2, _, err := l.input.readRune()
+				if err != nil {
+					return "", err
+				}
+				var octal string
+				if c2 < '0' || c2 > '7' {
+					l.input.unreadRune(c2)
+					octal = string(c)
+				} else {
+					l.colNo++
+					c3, _, err := l.input.readRune()
+					if err != nil {
+						return "", err
+					}
+					if c3 < '0' || c3 > '7' {
+						l.input.unreadRune(c3)
+						octal = string([]rune{c, c2})
+					} else {
+						l.colNo++
+						octal = string([]rune{c, c2, c3})
+					}
+				}
+				i, err := strconv.ParseInt(octal, 8, 32)
+				if err != nil {
+					return "", fmt.Errorf("invalid octal escape: \\%q", octal)
+				}
+				if i > 0xff {
+					// NOTE(review): message reads "out range"; likely intended
+					// "out of range" — kept verbatim to match upstream commit
+					return "", fmt.Errorf("octal escape is out range, must be between 0 and 377: \\%q", octal)
+				}
+				buf.WriteByte(byte(i))
+
+			} else if c == 'u' {
+				// short unicode escape: exactly four hex digits
+				u := make([]rune, 4)
+				for i := range u {
+					c, _, err := l.input.readRune()
+					if err != nil {
+						return "", err
+					}
+					l.colNo++
+					u[i] = c
+				}
+				i, err := strconv.ParseInt(string(u), 16, 32)
+				if err != nil {
+					return "", fmt.Errorf("invalid unicode escape: \\u%q", string(u))
+				}
+				buf.WriteRune(rune(i))
+
+			} else if c == 'U' {
+				// long unicode escape: exactly eight hex digits, must be a
+				// valid code point
+				u := make([]rune, 8)
+				for i := range u {
+					c, _, err := l.input.readRune()
+					if err != nil {
+						return "", err
+					}
+					l.colNo++
+					u[i] = c
+				}
+				i, err := strconv.ParseInt(string(u), 16, 32)
+				if err != nil {
+					return "", fmt.Errorf("invalid unicode escape: \\U%q", string(u))
+				}
+				if i > 0x10ffff || i < 0 {
+					return "", fmt.Errorf("unicode escape is out of range, must be between 0 and 0x10ffff: \\U%q", string(u))
+				}
+				buf.WriteRune(rune(i))
+
+			} else if c == 'a' {
+				buf.WriteByte('\a')
+			} else if c == 'b' {
+				buf.WriteByte('\b')
+			} else if c == 'f' {
+				buf.WriteByte('\f')
+			} else if c == 'n' {
+				buf.WriteByte('\n')
+			} else if c == 'r' {
+				buf.WriteByte('\r')
+			} else if c == 't' {
+				buf.WriteByte('\t')
+			} else if c == 'v' {
+				buf.WriteByte('\v')
+			} else if c == '\\' {
+				buf.WriteByte('\\')
+			} else if c == '\'' {
+				buf.WriteByte('\'')
+			} else if c == '"' {
+				buf.WriteByte('"')
+			} else if c == '?' {
+				buf.WriteByte('?')
+			} else {
+				return "", fmt.Errorf("invalid escape sequence: %q", "\\"+string(c))
+			}
+		} else {
+			buf.WriteRune(c)
+		}
+	}
+	return buf.String(), nil
+}
+
+// skipToEndOfLineComment consumes the remainder of a "//" comment. It returns
+// whether a newline terminated the comment (false means EOF) along with the
+// comment text including the leading "//". The newline, if any, is consumed
+// but not included in the text.
+func (l *protoLex) skipToEndOfLineComment() (bool, string) {
+	txt := []rune{'/', '/'}
+	for {
+		c, _, err := l.input.readRune()
+		if err != nil {
+			return false, string(txt)
+		}
+		if c == '\n' {
+			return true, string(txt)
+		}
+		l.colNo++
+		txt = append(txt, c)
+	}
+}
+
+// skipToEndOfBlockComment consumes a "/*"-style comment through its closing
+// "*/", tracking line/column positions across embedded newlines. It returns
+// the full comment text (including delimiters) and true on success, or ""
+// and false if EOF is reached before the comment is terminated.
+func (l *protoLex) skipToEndOfBlockComment() (string, bool) {
+	txt := []rune{'/', '*'}
+	for {
+		c, _, err := l.input.readRune()
+		if err != nil {
+			return "", false
+		}
+		if c == '\n' {
+			l.colNo = 0
+			l.lineNo++
+		} else {
+			l.colNo++
+		}
+		txt = append(txt, c)
+		if c == '*' {
+			// peek for the terminating '/'; otherwise keep scanning
+			c, _, err := l.input.readRune()
+			if err != nil {
+				return "", false
+			}
+			if c == '/' {
+				l.colNo++
+				txt = append(txt, c)
+				return string(txt), true
+			}
+			l.input.unreadRune(c)
+		}
+	}
+}
+
+// Error implements the goyacc lexer error interface. Only the first error is
+// retained; it is positioned at the start of the most recently emitted
+// symbol (l.prevSym).
+func (l *protoLex) Error(s string) {
+	if l.err == nil {
+		l.err = ErrorWithSourcePos{Underlying: errors.New(s), Pos: l.prevSym.start()}
+	}
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/linker.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/linker.go
new file mode 100644
index 0000000..c150936
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/linker.go
@@ -0,0 +1,652 @@
+package protoparse
+
+import (
+	"bytes"
+	"fmt"
+	"sort"
+	"strings"
+
+	"github.com/golang/protobuf/proto"
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+
+	"github.com/jhump/protoreflect/desc"
+	"github.com/jhump/protoreflect/desc/internal"
+)
+
+// linker performs the link step over a set of parsed files: pooling symbols,
+// resolving and qualifying type references, and building rich descriptors.
+type linker struct {
+	// files maps file name to its parse result for every file being linked.
+	files          map[string]*parseResult
+	// descriptorPool maps each file to its symbols keyed by fully-qualified name.
+	descriptorPool map[*dpb.FileDescriptorProto]map[string]proto.Message
+	// extensions records, per extended message FQN, which extension tag
+	// numbers are in use and by which extension (for duplicate detection).
+	extensions     map[string]map[int32]string
+}
+
+// newLinker returns a linker over the given parse results. The descriptor
+// pool and extension maps are populated later, during linkFiles.
+func newLinker(files map[string]*parseResult) *linker {
+	return &linker{files: files}
+}
+
+// linkFiles runs the full link process and returns rich descriptors keyed by
+// file name. It pools all symbols (detecting duplicates), resolves and
+// fully qualifies all type references, builds linked desc.FileDescriptors,
+// and finally interprets any remaining uninterpreted options.
+func (l *linker) linkFiles() (map[string]*desc.FileDescriptor, error) {
+	// First, we put all symbols into a single pool, which lets us ensure there
+	// are no duplicate symbols and will also let us resolve and revise all type
+	// references in next step.
+	if err := l.createDescriptorPool(); err != nil {
+		return nil, err
+	}
+
+	// After we've populated the pool, we can now try to resolve all type
+	// references. All references must be checked for correct type, any fields
+	// with enum types must be corrected (since we parse them as if they are
+	// message references since we don't actually know message or enum until
+	// link time), and references will be re-written to be fully-qualified
+	// references (e.g. start with a dot ".").
+	if err := l.resolveReferences(); err != nil {
+		return nil, err
+	}
+
+	// Now we've validated the descriptors, so we can link them into rich
+	// descriptors. This is a little redundant since that step does similar
+	// checking of symbols. But, without breaking encapsulation (e.g. exporting
+	// a lot of fields from desc package that are currently unexported) or
+	// merging this into the same package, we can't really prevent it.
+	linked, err := l.createdLinkedDescriptors()
+	if err != nil {
+		return nil, err
+	}
+
+	// Now that we have linked descriptors, we can interpret any uninterpreted
+	// options that remain.
+	for _, r := range l.files {
+		fd := linked[r.fd.GetName()]
+		if err := interpretFileOptions(r, richFileDescriptorish{FileDescriptor: fd}); err != nil {
+			return nil, err
+		}
+	}
+
+	return linked, nil
+}
+
+// createDescriptorPool indexes every symbol of every file by fully-qualified
+// name, one pool per file, returning a positioned error on any duplicate
+// within a file. It then merges the per-file pools to catch the same symbol
+// declared in two different files, reporting against the lexically-later
+// file name so errors are deterministic regardless of map iteration order.
+func (l *linker) createDescriptorPool() error {
+	l.descriptorPool = map[*dpb.FileDescriptorProto]map[string]proto.Message{}
+	for _, r := range l.files {
+		fd := r.fd
+		pool := map[string]proto.Message{}
+		l.descriptorPool[fd] = pool
+		prefix := fd.GetPackage()
+		if prefix != "" {
+			prefix += "."
+		}
+		for _, md := range fd.MessageType {
+			if err := addMessageToPool(r, pool, prefix, md); err != nil {
+				return err
+			}
+		}
+		for _, fld := range fd.Extension {
+			if err := addFieldToPool(r, pool, prefix, fld); err != nil {
+				return err
+			}
+		}
+		for _, ed := range fd.EnumType {
+			if err := addEnumToPool(r, pool, prefix, ed); err != nil {
+				return err
+			}
+		}
+		for _, sd := range fd.Service {
+			if err := addServiceToPool(r, pool, prefix, sd); err != nil {
+				return err
+			}
+		}
+	}
+	// try putting everything into a single pool, to ensure there are no duplicates
+	// across files (e.g. same symbol, but declared in two different files)
+	type entry struct {
+		file string
+		msg  proto.Message
+	}
+	pool := map[string]entry{}
+	for f, p := range l.descriptorPool {
+		for k, v := range p {
+			if e, ok := pool[k]; ok {
+				desc1 := e.msg
+				file1 := e.file
+				desc2 := v
+				file2 := f.GetName()
+				// order the two files by name so the error is deterministic
+				if file2 < file1 {
+					file1, file2 = file2, file1
+					desc1, desc2 = desc2, desc1
+				}
+				node := l.files[file2].nodes[desc2]
+				return ErrorWithSourcePos{Pos: node.start(), Underlying: fmt.Errorf("duplicate symbol %s: already defined as %s in %q", k, descriptorType(desc1), file1)}
+			}
+			pool[k] = entry{file: f.GetName(), msg: v}
+		}
+	}
+
+	return nil
+}
+
+// addMessageToPool registers a message under prefix and then recursively
+// registers its fields, extensions, nested messages, and nested enums under
+// the message's own fully-qualified name.
+func addMessageToPool(r *parseResult, pool map[string]proto.Message, prefix string, md *dpb.DescriptorProto) error {
+	fqn := prefix + md.GetName()
+	if err := addToPool(r, pool, fqn, md); err != nil {
+		return err
+	}
+	prefix = fqn + "."
+	for _, fld := range md.Field {
+		if err := addFieldToPool(r, pool, prefix, fld); err != nil {
+			return err
+		}
+	}
+	for _, fld := range md.Extension {
+		if err := addFieldToPool(r, pool, prefix, fld); err != nil {
+			return err
+		}
+	}
+	for _, nmd := range md.NestedType {
+		if err := addMessageToPool(r, pool, prefix, nmd); err != nil {
+			return err
+		}
+	}
+	for _, ed := range md.EnumType {
+		if err := addEnumToPool(r, pool, prefix, ed); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// addFieldToPool registers a field (or extension) under prefix.
+func addFieldToPool(r *parseResult, pool map[string]proto.Message, prefix string, fld *dpb.FieldDescriptorProto) error {
+	fqn := prefix + fld.GetName()
+	return addToPool(r, pool, fqn, fld)
+}
+
+// addEnumToPool registers an enum under prefix and each of its values under
+// the enum's fully-qualified name.
+func addEnumToPool(r *parseResult, pool map[string]proto.Message, prefix string, ed *dpb.EnumDescriptorProto) error {
+	fqn := prefix + ed.GetName()
+	if err := addToPool(r, pool, fqn, ed); err != nil {
+		return err
+	}
+	for _, evd := range ed.Value {
+		vfqn := fqn + "." + evd.GetName()
+		if err := addToPool(r, pool, vfqn, evd); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// addServiceToPool registers a service under prefix and each of its methods
+// under the service's fully-qualified name.
+func addServiceToPool(r *parseResult, pool map[string]proto.Message, prefix string, sd *dpb.ServiceDescriptorProto) error {
+	fqn := prefix + sd.GetName()
+	if err := addToPool(r, pool, fqn, sd); err != nil {
+		return err
+	}
+	for _, mtd := range sd.Method {
+		mfqn := fqn + "." + mtd.GetName()
+		if err := addToPool(r, pool, mfqn, mtd); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// addToPool inserts dsc under fqn, returning a positioned error (at the AST
+// node for dsc) if the name is already taken by another symbol in this pool.
+func addToPool(r *parseResult, pool map[string]proto.Message, fqn string, dsc proto.Message) error {
+	if d, ok := pool[fqn]; ok {
+		node := r.nodes[dsc]
+		return ErrorWithSourcePos{Pos: node.start(), Underlying: fmt.Errorf("duplicate symbol %s: already defined as %s", fqn, descriptorType(d))}
+	}
+	pool[fqn] = dsc
+	return nil
+}
+
+// descriptorType returns a human-readable kind name for a descriptor proto,
+// for use in error messages. A field with a non-empty extendee is reported
+// as an "extension".
+func descriptorType(m proto.Message) string {
+	switch m := m.(type) {
+	case *dpb.DescriptorProto:
+		return "message"
+	case *dpb.DescriptorProto_ExtensionRange:
+		return "extension range"
+	case *dpb.FieldDescriptorProto:
+		if m.GetExtendee() == "" {
+			return "field"
+		} else {
+			return "extension"
+		}
+	case *dpb.EnumDescriptorProto:
+		return "enum"
+	case *dpb.EnumValueDescriptorProto:
+		return "enum value"
+	case *dpb.ServiceDescriptorProto:
+		return "service"
+	case *dpb.MethodDescriptorProto:
+		return "method"
+	case *dpb.FileDescriptorProto:
+		return "file"
+	default:
+		// shouldn't be possible
+		return fmt.Sprintf("%T", m)
+	}
+}
+
+// resolveReferences walks every file and resolves the type references in its
+// options, messages, extensions, enums, and services, rewriting them to
+// fully-qualified form. It also initializes the extension tag-usage map used
+// for duplicate-extension detection.
+func (l *linker) resolveReferences() error {
+	l.extensions = map[string]map[int32]string{}
+	for _, r := range l.files {
+		fd := r.fd
+		prefix := fd.GetPackage()
+		// the outermost scope is the file itself (its package hierarchy)
+		scopes := []scope{fileScope(fd, l)}
+		if prefix != "" {
+			prefix += "."
+		}
+		if fd.Options != nil {
+			if err := l.resolveOptions(r, fd, "file", fd.GetName(), proto.MessageName(fd.Options), fd.Options.UninterpretedOption, scopes); err != nil {
+				return err
+			}
+		}
+		for _, md := range fd.MessageType {
+			if err := l.resolveMessageTypes(r, fd, prefix, md, scopes); err != nil {
+				return err
+			}
+		}
+		for _, fld := range fd.Extension {
+			if err := l.resolveFieldTypes(r, fd, prefix, fld, scopes); err != nil {
+				return err
+			}
+		}
+		for _, ed := range fd.EnumType {
+			if err := l.resolveEnumTypes(r, fd, prefix, ed, scopes); err != nil {
+				return err
+			}
+		}
+		for _, sd := range fd.Service {
+			if err := l.resolveServiceTypes(r, fd, prefix, sd, scopes); err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
+
+// resolveEnumTypes resolves option references for an enum and for each of
+// its values. Enums carry no other type references.
+func (l *linker) resolveEnumTypes(r *parseResult, fd *dpb.FileDescriptorProto, prefix string, ed *dpb.EnumDescriptorProto, scopes []scope) error {
+	enumFqn := prefix + ed.GetName()
+	if ed.Options != nil {
+		if err := l.resolveOptions(r, fd, "enum", enumFqn, proto.MessageName(ed.Options), ed.Options.UninterpretedOption, scopes); err != nil {
+			return err
+		}
+	}
+	for _, evd := range ed.Value {
+		if evd.Options != nil {
+			evFqn := enumFqn + "." + evd.GetName()
+			if err := l.resolveOptions(r, fd, "enum value", evFqn, proto.MessageName(evd.Options), evd.Options.UninterpretedOption, scopes); err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
+
+// resolveMessageTypes resolves references within a message: its options,
+// nested messages and enums, fields, extensions, and extension-range
+// options. A message scope is pushed onto scopes so names inside resolve
+// relative to this message first.
+func (l *linker) resolveMessageTypes(r *parseResult, fd *dpb.FileDescriptorProto, prefix string, md *dpb.DescriptorProto, scopes []scope) error {
+	fqn := prefix + md.GetName()
+	scope := messageScope(fqn, isProto3(fd), l.descriptorPool[fd])
+	scopes = append(scopes, scope)
+	prefix = fqn + "."
+
+	if md.Options != nil {
+		if err := l.resolveOptions(r, fd, "message", fqn, proto.MessageName(md.Options), md.Options.UninterpretedOption, scopes); err != nil {
+			return err
+		}
+	}
+
+	for _, nmd := range md.NestedType {
+		if err := l.resolveMessageTypes(r, fd, prefix, nmd, scopes); err != nil {
+			return err
+		}
+	}
+	for _, ned := range md.EnumType {
+		if err := l.resolveEnumTypes(r, fd, prefix, ned, scopes); err != nil {
+			return err
+		}
+	}
+	for _, fld := range md.Field {
+		if err := l.resolveFieldTypes(r, fd, prefix, fld, scopes); err != nil {
+			return err
+		}
+	}
+	for _, fld := range md.Extension {
+		if err := l.resolveFieldTypes(r, fd, prefix, fld, scopes); err != nil {
+			return err
+		}
+	}
+	for _, er := range md.ExtensionRange {
+		if er.Options != nil {
+			// ranges have no names; synthesize one from the bounds (end is
+			// exclusive in the proto, hence the -1 for display)
+			erName := fmt.Sprintf("%s:%d-%d", fqn, er.GetStart(), er.GetEnd()-1)
+			if err := l.resolveOptions(r, fd, "extension range", erName, proto.MessageName(er.Options), er.Options.UninterpretedOption, scopes); err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
+
+// resolveFieldTypes validates and fully qualifies a field's extendee (for
+// extensions) and its type reference. For extensions it also verifies the
+// tag lies within one of the extended message's extension ranges and is not
+// already claimed by another extension. Fields parsed tentatively as message
+// types are corrected to enum types when the reference resolves to an enum.
+func (l *linker) resolveFieldTypes(r *parseResult, fd *dpb.FileDescriptorProto, prefix string, fld *dpb.FieldDescriptorProto, scopes []scope) error {
+	thisName := prefix + fld.GetName()
+	scope := fmt.Sprintf("field %s", thisName)
+	node := r.getFieldNode(fld)
+	elemType := "field"
+	if fld.GetExtendee() != "" {
+		fqn, dsc, _ := l.resolve(fd, fld.GetExtendee(), isMessage, scopes)
+		if dsc == nil {
+			return ErrorWithSourcePos{Pos: node.fieldExtendee().start(), Underlying: fmt.Errorf("unknown extendee type %s", fld.GetExtendee())}
+		}
+		extd, ok := dsc.(*dpb.DescriptorProto)
+		if !ok {
+			otherType := descriptorType(dsc)
+			return ErrorWithSourcePos{Pos: node.fieldExtendee().start(), Underlying: fmt.Errorf("extendee is invalid: %s is a %s, not a message", fqn, otherType)}
+		}
+		fld.Extendee = proto.String("." + fqn)
+		// make sure the tag number is in range
+		found := false
+		tag := fld.GetNumber()
+		for _, rng := range extd.ExtensionRange {
+			// range end is exclusive
+			if tag >= rng.GetStart() && tag < rng.GetEnd() {
+				found = true
+				break
+			}
+		}
+		if !found {
+			return ErrorWithSourcePos{Pos: node.fieldTag().start(), Underlying: fmt.Errorf("%s: tag %d is not in valid range for extended type %s", scope, tag, fqn)}
+		}
+		// make sure tag is not a duplicate
+		usedExtTags := l.extensions[fqn]
+		if usedExtTags == nil {
+			usedExtTags = map[int32]string{}
+			l.extensions[fqn] = usedExtTags
+		}
+		if other := usedExtTags[fld.GetNumber()]; other != "" {
+			return ErrorWithSourcePos{Pos: node.fieldTag().start(), Underlying: fmt.Errorf("%s: duplicate extension: %s and %s are both using tag %d", scope, other, thisName, fld.GetNumber())}
+		}
+		usedExtTags[fld.GetNumber()] = thisName
+		elemType = "extension"
+	}
+
+	if fld.Options != nil {
+		if err := l.resolveOptions(r, fd, elemType, thisName, proto.MessageName(fld.Options), fld.Options.UninterpretedOption, scopes); err != nil {
+			return err
+		}
+	}
+
+	if fld.GetTypeName() == "" {
+		// scalar type; no further resolution required
+		return nil
+	}
+
+	fqn, dsc, proto3 := l.resolve(fd, fld.GetTypeName(), isType, scopes)
+	if dsc == nil {
+		return ErrorWithSourcePos{Pos: node.fieldType().start(), Underlying: fmt.Errorf("%s: unknown type %s", scope, fld.GetTypeName())}
+	}
+	switch dsc := dsc.(type) {
+	case *dpb.DescriptorProto:
+		fld.TypeName = proto.String("." + fqn)
+	case *dpb.EnumDescriptorProto:
+		if fld.GetExtendee() == "" && isProto3(fd) && !proto3 {
+			// fields in a proto3 message cannot refer to proto2 enums
+			return ErrorWithSourcePos{Pos: node.fieldType().start(), Underlying: fmt.Errorf("%s: cannot use proto2 enum %s in a proto3 message", scope, fld.GetTypeName())}
+		}
+		fld.TypeName = proto.String("." + fqn)
+		// the type was tentatively set to message, but now we know it's actually an enum
+		fld.Type = dpb.FieldDescriptorProto_TYPE_ENUM.Enum()
+	default:
+		otherType := descriptorType(dsc)
+		return ErrorWithSourcePos{Pos: node.fieldType().start(), Underlying: fmt.Errorf("%s: invalid type: %s is a %s, not a message or enum", scope, fqn, otherType)}
+	}
+	return nil
+}
+
+// resolveServiceTypes resolves a service's options and, for each method, its
+// options and its request/response types, which must resolve to messages and
+// are rewritten to fully-qualified form.
+func (l *linker) resolveServiceTypes(r *parseResult, fd *dpb.FileDescriptorProto, prefix string, sd *dpb.ServiceDescriptorProto, scopes []scope) error {
+	thisName := prefix + sd.GetName()
+	if sd.Options != nil {
+		if err := l.resolveOptions(r, fd, "service", thisName, proto.MessageName(sd.Options), sd.Options.UninterpretedOption, scopes); err != nil {
+			return err
+		}
+	}
+
+	for _, mtd := range sd.Method {
+		if mtd.Options != nil {
+			if err := l.resolveOptions(r, fd, "method", thisName+"."+mtd.GetName(), proto.MessageName(mtd.Options), mtd.Options.UninterpretedOption, scopes); err != nil {
+				return err
+			}
+		}
+		scope := fmt.Sprintf("method %s.%s", thisName, mtd.GetName())
+		node := r.getMethodNode(mtd)
+		fqn, dsc, _ := l.resolve(fd, mtd.GetInputType(), isMessage, scopes)
+		if dsc == nil {
+			return ErrorWithSourcePos{Pos: node.getInputType().start(), Underlying: fmt.Errorf("%s: unknown request type %s", scope, mtd.GetInputType())}
+		}
+		if _, ok := dsc.(*dpb.DescriptorProto); !ok {
+			otherType := descriptorType(dsc)
+			return ErrorWithSourcePos{Pos: node.getInputType().start(), Underlying: fmt.Errorf("%s: invalid request type: %s is a %s, not a message", scope, fqn, otherType)}
+		}
+		mtd.InputType = proto.String("." + fqn)
+
+		fqn, dsc, _ = l.resolve(fd, mtd.GetOutputType(), isMessage, scopes)
+		if dsc == nil {
+			return ErrorWithSourcePos{Pos: node.getOutputType().start(), Underlying: fmt.Errorf("%s: unknown response type %s", scope, mtd.GetOutputType())}
+		}
+		if _, ok := dsc.(*dpb.DescriptorProto); !ok {
+			otherType := descriptorType(dsc)
+			return ErrorWithSourcePos{Pos: node.getOutputType().start(), Underlying: fmt.Errorf("%s: invalid response type: %s is a %s, not a message", scope, fqn, otherType)}
+		}
+		mtd.OutputType = proto.String("." + fqn)
+	}
+	return nil
+}
+
+// resolveOptions fully qualifies extension names appearing in option names
+// (the parenthesized name parts), verifying each resolves to an actual
+// extension field. Non-extension parts are left alone; option values are
+// interpreted later, after linking. elemType/elemName only feed the error
+// message prefix (omitted for file-level options).
+func (l *linker) resolveOptions(r *parseResult, fd *dpb.FileDescriptorProto, elemType, elemName, optType string, opts []*dpb.UninterpretedOption, scopes []scope) error {
+	var scope string
+	if elemType != "file" {
+		scope = fmt.Sprintf("%s %s: ", elemType, elemName)
+	}
+	for _, opt := range opts {
+		for _, nm := range opt.Name {
+			if nm.GetIsExtension() {
+				node := r.getOptionNamePartNode(nm)
+				fqn, dsc, _ := l.resolve(fd, nm.GetNamePart(), isField, scopes)
+				if dsc == nil {
+					return ErrorWithSourcePos{Pos: node.start(), Underlying: fmt.Errorf("%sunknown extension %s", scope, nm.GetNamePart())}
+				}
+				if ext, ok := dsc.(*dpb.FieldDescriptorProto); !ok {
+					otherType := descriptorType(dsc)
+					return ErrorWithSourcePos{Pos: node.start(), Underlying: fmt.Errorf("%sinvalid extension: %s is a %s, not an extension", scope, nm.GetNamePart(), otherType)}
+				} else if ext.GetExtendee() == "" {
+					return ErrorWithSourcePos{Pos: node.start(), Underlying: fmt.Errorf("%sinvalid extension: %s is a field but not an extension", scope, nm.GetNamePart())}
+				}
+				nm.NamePart = proto.String("." + fqn)
+			}
+		}
+	}
+	return nil
+}
+
+// resolve finds the descriptor for name relative to file fd. A leading dot
+// means the name is already fully qualified and is looked up directly.
+// Otherwise scopes are searched innermost (last) to outermost (first). The
+// allowed predicate filters acceptable descriptor kinds; when only a
+// disallowed match exists it is still returned as a best guess so the
+// caller can produce a more precise error message.
+func (l *linker) resolve(fd *dpb.FileDescriptorProto, name string, allowed func(proto.Message) bool, scopes []scope) (fqn string, element proto.Message, proto3 bool) {
+	if strings.HasPrefix(name, ".") {
+		// already fully-qualified
+		d, proto3 := l.findSymbol(fd, name[1:], false, map[*dpb.FileDescriptorProto]struct{}{})
+		if d != nil {
+			return name[1:], d, proto3
+		}
+	} else {
+		// unqualified, so we look in the enclosing (last) scope first and move
+		// towards outermost (first) scope, trying to resolve the symbol
+		var bestGuess proto.Message
+		var bestGuessFqn string
+		var bestGuessProto3 bool
+		for i := len(scopes) - 1; i >= 0; i-- {
+			fqn, d, proto3 := scopes[i](name)
+			if d != nil {
+				if allowed(d) {
+					return fqn, d, proto3
+				} else if bestGuess == nil {
+					bestGuess = d
+					bestGuessFqn = fqn
+					bestGuessProto3 = proto3
+				}
+			}
+		}
+		// we return best guess, even though it was not an allowed kind of
+		// descriptor, so caller can print a better error message (e.g.
+		// indicating that the name was found but that it's the wrong type)
+		return bestGuessFqn, bestGuess, bestGuessProto3
+	}
+	return "", nil, false
+}
+
+// isField reports whether m is a field descriptor (used as a resolve filter).
+func isField(m proto.Message) bool {
+	_, ok := m.(*dpb.FieldDescriptorProto)
+	return ok
+}
+
+// isMessage reports whether m is a message descriptor (used as a resolve filter).
+func isMessage(m proto.Message) bool {
+	_, ok := m.(*dpb.DescriptorProto)
+	return ok
+}
+
+// isType reports whether m is a message or enum descriptor, i.e. something a
+// field's type reference may legally name (used as a resolve filter).
+func isType(m proto.Message) bool {
+	switch m.(type) {
+	case *dpb.DescriptorProto, *dpb.EnumDescriptorProto:
+		return true
+	}
+	return false
+}
+
+// scope represents a lexical scope in a proto file in which messages and enums
+// can be declared.
+// Querying a scope with a symbol name yields the fully-qualified name, the
+// descriptor found (nil if absent), and whether it came from a proto3 file.
+type scope func(symbol string) (fqn string, element proto.Message, proto3 bool)
+
+// fileScope returns a scope covering symbols visible at file level: names in
+// fd itself and in its imports, tried under each package prefix from most-
+// to least-specific.
+func fileScope(fd *dpb.FileDescriptorProto, l *linker) scope {
+	// we search symbols in this file, but also symbols in other files that have
+	// the same package as this file or a "parent" package (in protobuf,
+	// packages are a hierarchy like C++ namespaces)
+	prefixes := internal.CreatePrefixList(fd.GetPackage())
+	return func(name string) (string, proto.Message, bool) {
+		for _, prefix := range prefixes {
+			var n string
+			if prefix == "" {
+				n = name
+			} else {
+				n = prefix + "." + name
+			}
+			d, proto3 := l.findSymbol(fd, n, false, map[*dpb.FileDescriptorProto]struct{}{})
+			if d != nil {
+				return n, d, proto3
+			}
+		}
+		return "", nil, false
+	}
+}
+
+// messageScope returns a scope that resolves names declared directly inside
+// the given message, by prefixing the queried name with the message's
+// fully-qualified name and looking it up in the file's symbol pool.
+func messageScope(messageName string, proto3 bool, filePool map[string]proto.Message) scope {
+	return func(name string) (string, proto.Message, bool) {
+		n := messageName + "." + name
+		if d, ok := filePool[n]; ok {
+			return n, d, proto3
+		}
+		return "", nil, false
+	}
+}
+
+// findSymbol looks up name in fd's own pool and then recursively in its
+// imports. checked guards against visiting a file twice. When public is
+// true only transitive public imports are searched (per the semantics of
+// "import public"); direct dependencies are searched only from the
+// originating file. The second result reports whether the defining file
+// uses proto3 syntax.
+func (l *linker) findSymbol(fd *dpb.FileDescriptorProto, name string, public bool, checked map[*dpb.FileDescriptorProto]struct{}) (element proto.Message, proto3 bool) {
+	if _, ok := checked[fd]; ok {
+		// already checked this one
+		return nil, false
+	}
+	checked[fd] = struct{}{}
+	d := l.descriptorPool[fd][name]
+	if d != nil {
+		return d, isProto3(fd)
+	}
+
+	// When public = false, we are searching only directly imported symbols. But we
+	// also need to search transitive public imports due to semantics of public imports.
+	if public {
+		for _, depIndex := range fd.PublicDependency {
+			dep := fd.Dependency[depIndex]
+			depres := l.files[dep]
+			if depres == nil {
+				// we'll catch this error later
+				continue
+			}
+			if d, proto3 := l.findSymbol(depres.fd, name, true, checked); d != nil {
+				return d, proto3
+			}
+		}
+	} else {
+		for _, dep := range fd.Dependency {
+			depres := l.files[dep]
+			if depres == nil {
+				// we'll catch this error later
+				continue
+			}
+			if d, proto3 := l.findSymbol(depres.fd, name, true, checked); d != nil {
+				return d, proto3
+			}
+		}
+	}
+
+	return nil, false
+}
+
+// isProto3 reports whether the file declares proto3 syntax.
+func isProto3(fd *dpb.FileDescriptorProto) bool {
+	return fd.GetSyntax() == "proto3"
+}
+
+// createdLinkedDescriptors converts every parse result into a linked
+// *desc.FileDescriptor, processing files in sorted-name order so output and
+// errors are deterministic. (NOTE(review): name reads "created…"; likely
+// intended "createLinkedDescriptors" — kept verbatim to match upstream.)
+func (l *linker) createdLinkedDescriptors() (map[string]*desc.FileDescriptor, error) {
+	names := make([]string, 0, len(l.files))
+	for name := range l.files {
+		names = append(names, name)
+	}
+	sort.Strings(names)
+	linked := map[string]*desc.FileDescriptor{}
+	for _, name := range names {
+		if _, err := l.linkFile(name, nil, linked); err != nil {
+			return nil, err
+		}
+	}
+	return linked, nil
+}
+
+// linkFile links the named file and, recursively, its dependencies into the
+// linked map, returning the file's rich descriptor. seen carries the import
+// chain leading here and is used to detect and report import cycles.
+func (l *linker) linkFile(name string, seen []string, linked map[string]*desc.FileDescriptor) (*desc.FileDescriptor, error) {
+	// check for import cycle
+	for _, s := range seen {
+		if name == s {
+			// build a readable "a" -> "b" -> "a" chain for the error
+			var msg bytes.Buffer
+			first := true
+			for _, s := range seen {
+				if first {
+					first = false
+				} else {
+					msg.WriteString(" -> ")
+				}
+				fmt.Fprintf(&msg, "%q", s)
+			}
+			fmt.Fprintf(&msg, " -> %q", name)
+			return nil, fmt.Errorf("cycle found in imports: %s", msg.String())
+		}
+	}
+	seen = append(seen, name)
+
+	if lfd, ok := linked[name]; ok {
+		// already linked
+		return lfd, nil
+	}
+	r := l.files[name]
+	if r == nil {
+		importer := seen[len(seen)-2] // len-1 is *this* file, before that is the one that imported it
+		return nil, fmt.Errorf("no descriptor found for %q, imported by %q", name, importer)
+	}
+	var deps []*desc.FileDescriptor
+	for _, dep := range r.fd.Dependency {
+		ldep, err := l.linkFile(dep, seen, linked)
+		if err != nil {
+			return nil, err
+		}
+		deps = append(deps, ldep)
+	}
+	lfd, err := desc.CreateFileDescriptor(r.fd, deps...)
+	if err != nil {
+		return nil, fmt.Errorf("error linking %q: %s", name, err)
+	}
+	linked[name] = lfd
+	return lfd, nil
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/options.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/options.go
new file mode 100644
index 0000000..be287f6
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/options.go
@@ -0,0 +1,1405 @@
+package protoparse
+
+import (
+	"bytes"
+	"fmt"
+	"math"
+
+	"github.com/golang/protobuf/proto"
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+
+	"github.com/jhump/protoreflect/desc"
+	"github.com/jhump/protoreflect/desc/internal"
+	"github.com/jhump/protoreflect/dynamic"
+)
+
+// NB: To process options, we need descriptors, but we may not have rich
+// descriptors when trying to interpret options for unlinked parsed files.
+// So we define minimal interfaces that can be backed by both rich descriptors
+// as well as their poorer cousins, plain ol' descriptor protos.
+
// descriptorish is the lowest common denominator of the wrappers below:
// enough to identify an element and reach its underlying proto and file.
type descriptorish interface {
	GetFile() fileDescriptorish
	GetFullyQualifiedName() string
	AsProto() proto.Message
}

// fileDescriptorish abstracts a file descriptor, rich or poor. It is the
// root of the element hierarchy; every element can reach its file.
type fileDescriptorish interface {
	descriptorish
	GetFileOptions() *dpb.FileOptions
	GetPackage() string
	// FindSymbol resolves a symbol declared by this file. The "poor"
	// (unlinked) implementation always returns nil.
	FindSymbol(name string) desc.Descriptor
	GetPublicDependencies() []fileDescriptorish
	GetDependencies() []fileDescriptorish
	GetMessageTypes() []msgDescriptorish
	GetExtensions() []fldDescriptorish
	GetEnumTypes() []enumDescriptorish
	GetServices() []svcDescriptorish
}

// msgDescriptorish abstracts a message descriptor and its nested elements.
type msgDescriptorish interface {
	descriptorish
	GetMessageOptions() *dpb.MessageOptions
	GetFields() []fldDescriptorish
	GetOneOfs() []oneofDescriptorish
	GetExtensionRanges() []extRangeDescriptorish
	GetNestedMessageTypes() []msgDescriptorish
	GetNestedExtensions() []fldDescriptorish
	GetNestedEnumTypes() []enumDescriptorish
}

// fldDescriptorish abstracts a field (or extension) descriptor. The type
// accessors return nil for the "poor" implementation, which has no linkage.
type fldDescriptorish interface {
	descriptorish
	GetFieldOptions() *dpb.FieldOptions
	GetMessageType() *desc.MessageDescriptor
	GetEnumType() *desc.EnumDescriptor
	AsFieldDescriptorProto() *dpb.FieldDescriptorProto
}

// oneofDescriptorish abstracts a oneof descriptor.
type oneofDescriptorish interface {
	descriptorish
	GetOneOfOptions() *dpb.OneofOptions
}

// enumDescriptorish abstracts an enum descriptor and its values.
type enumDescriptorish interface {
	descriptorish
	GetEnumOptions() *dpb.EnumOptions
	GetValues() []enumValDescriptorish
}

// enumValDescriptorish abstracts an enum value descriptor.
type enumValDescriptorish interface {
	descriptorish
	GetEnumValueOptions() *dpb.EnumValueOptions
}

// svcDescriptorish abstracts a service descriptor and its methods.
type svcDescriptorish interface {
	descriptorish
	GetServiceOptions() *dpb.ServiceOptions
	GetMethods() []methodDescriptorish
}

// methodDescriptorish abstracts a method descriptor.
type methodDescriptorish interface {
	descriptorish
	GetMethodOptions() *dpb.MethodOptions
}
+
+// The hierarchy of descriptorish implementations backed by
+// rich descriptors:
+
+type richFileDescriptorish struct {
+	*desc.FileDescriptor
+}
+
+func (d richFileDescriptorish) GetFile() fileDescriptorish {
+	return d
+}
+
+func (d richFileDescriptorish) GetPublicDependencies() []fileDescriptorish {
+	deps := d.FileDescriptor.GetPublicDependencies()
+	ret := make([]fileDescriptorish, len(deps))
+	for i, d := range deps {
+		ret[i] = richFileDescriptorish{FileDescriptor: d}
+	}
+	return ret
+}
+
+func (d richFileDescriptorish) GetDependencies() []fileDescriptorish {
+	deps := d.FileDescriptor.GetDependencies()
+	ret := make([]fileDescriptorish, len(deps))
+	for i, d := range deps {
+		ret[i] = richFileDescriptorish{FileDescriptor: d}
+	}
+	return ret
+}
+
+func (d richFileDescriptorish) GetMessageTypes() []msgDescriptorish {
+	msgs := d.FileDescriptor.GetMessageTypes()
+	ret := make([]msgDescriptorish, len(msgs))
+	for i, m := range msgs {
+		ret[i] = richMsgDescriptorish{MessageDescriptor: m}
+	}
+	return ret
+}
+
+func (d richFileDescriptorish) GetExtensions() []fldDescriptorish {
+	flds := d.FileDescriptor.GetExtensions()
+	ret := make([]fldDescriptorish, len(flds))
+	for i, f := range flds {
+		ret[i] = richFldDescriptorish{FieldDescriptor: f}
+	}
+	return ret
+}
+
+func (d richFileDescriptorish) GetEnumTypes() []enumDescriptorish {
+	ens := d.FileDescriptor.GetEnumTypes()
+	ret := make([]enumDescriptorish, len(ens))
+	for i, en := range ens {
+		ret[i] = richEnumDescriptorish{EnumDescriptor: en}
+	}
+	return ret
+}
+
+func (d richFileDescriptorish) GetServices() []svcDescriptorish {
+	svcs := d.FileDescriptor.GetServices()
+	ret := make([]svcDescriptorish, len(svcs))
+	for i, s := range svcs {
+		ret[i] = richSvcDescriptorish{ServiceDescriptor: s}
+	}
+	return ret
+}
+
+type richMsgDescriptorish struct {
+	*desc.MessageDescriptor
+}
+
+func (d richMsgDescriptorish) GetFile() fileDescriptorish {
+	return richFileDescriptorish{FileDescriptor: d.MessageDescriptor.GetFile()}
+}
+
+func (d richMsgDescriptorish) GetFields() []fldDescriptorish {
+	flds := d.MessageDescriptor.GetFields()
+	ret := make([]fldDescriptorish, len(flds))
+	for i, f := range flds {
+		ret[i] = richFldDescriptorish{FieldDescriptor: f}
+	}
+	return ret
+}
+
+func (d richMsgDescriptorish) GetOneOfs() []oneofDescriptorish {
+	oos := d.MessageDescriptor.GetOneOfs()
+	ret := make([]oneofDescriptorish, len(oos))
+	for i, oo := range oos {
+		ret[i] = richOneOfDescriptorish{OneOfDescriptor: oo}
+	}
+	return ret
+}
+
+func (d richMsgDescriptorish) GetExtensionRanges() []extRangeDescriptorish {
+	md := d.MessageDescriptor
+	mdFqn := md.GetFullyQualifiedName()
+	extrs := md.AsDescriptorProto().GetExtensionRange()
+	ret := make([]extRangeDescriptorish, len(extrs))
+	for i, extr := range extrs {
+		ret[i] = extRangeDescriptorish{
+			er:   extr,
+			qual: mdFqn,
+			file: richFileDescriptorish{FileDescriptor: md.GetFile()},
+		}
+	}
+	return ret
+}
+
+func (d richMsgDescriptorish) GetNestedMessageTypes() []msgDescriptorish {
+	msgs := d.MessageDescriptor.GetNestedMessageTypes()
+	ret := make([]msgDescriptorish, len(msgs))
+	for i, m := range msgs {
+		ret[i] = richMsgDescriptorish{MessageDescriptor: m}
+	}
+	return ret
+}
+
+func (d richMsgDescriptorish) GetNestedExtensions() []fldDescriptorish {
+	flds := d.MessageDescriptor.GetNestedExtensions()
+	ret := make([]fldDescriptorish, len(flds))
+	for i, f := range flds {
+		ret[i] = richFldDescriptorish{FieldDescriptor: f}
+	}
+	return ret
+}
+
+func (d richMsgDescriptorish) GetNestedEnumTypes() []enumDescriptorish {
+	ens := d.MessageDescriptor.GetNestedEnumTypes()
+	ret := make([]enumDescriptorish, len(ens))
+	for i, en := range ens {
+		ret[i] = richEnumDescriptorish{EnumDescriptor: en}
+	}
+	return ret
+}
+
+type richFldDescriptorish struct {
+	*desc.FieldDescriptor
+}
+
+func (d richFldDescriptorish) GetFile() fileDescriptorish {
+	return richFileDescriptorish{FileDescriptor: d.FieldDescriptor.GetFile()}
+}
+
+func (d richFldDescriptorish) AsFieldDescriptorProto() *dpb.FieldDescriptorProto {
+	return d.FieldDescriptor.AsFieldDescriptorProto()
+}
+
+type richOneOfDescriptorish struct {
+	*desc.OneOfDescriptor
+}
+
+func (d richOneOfDescriptorish) GetFile() fileDescriptorish {
+	return richFileDescriptorish{FileDescriptor: d.OneOfDescriptor.GetFile()}
+}
+
+type richEnumDescriptorish struct {
+	*desc.EnumDescriptor
+}
+
+func (d richEnumDescriptorish) GetFile() fileDescriptorish {
+	return richFileDescriptorish{FileDescriptor: d.EnumDescriptor.GetFile()}
+}
+
+func (d richEnumDescriptorish) GetValues() []enumValDescriptorish {
+	vals := d.EnumDescriptor.GetValues()
+	ret := make([]enumValDescriptorish, len(vals))
+	for i, val := range vals {
+		ret[i] = richEnumValDescriptorish{EnumValueDescriptor: val}
+	}
+	return ret
+}
+
+type richEnumValDescriptorish struct {
+	*desc.EnumValueDescriptor
+}
+
+func (d richEnumValDescriptorish) GetFile() fileDescriptorish {
+	return richFileDescriptorish{FileDescriptor: d.EnumValueDescriptor.GetFile()}
+}
+
+type richSvcDescriptorish struct {
+	*desc.ServiceDescriptor
+}
+
+func (d richSvcDescriptorish) GetFile() fileDescriptorish {
+	return richFileDescriptorish{FileDescriptor: d.ServiceDescriptor.GetFile()}
+}
+
+func (d richSvcDescriptorish) GetMethods() []methodDescriptorish {
+	mtds := d.ServiceDescriptor.GetMethods()
+	ret := make([]methodDescriptorish, len(mtds))
+	for i, mtd := range mtds {
+		ret[i] = richMethodDescriptorish{MethodDescriptor: mtd}
+	}
+	return ret
+}
+
+type richMethodDescriptorish struct {
+	*desc.MethodDescriptor
+}
+
+func (d richMethodDescriptorish) GetFile() fileDescriptorish {
+	return richFileDescriptorish{FileDescriptor: d.MethodDescriptor.GetFile()}
+}
+
+// The hierarchy of descriptorish implementations backed by
+// plain descriptor protos:
+
+type poorFileDescriptorish struct {
+	*dpb.FileDescriptorProto
+}
+
+func (d poorFileDescriptorish) GetFile() fileDescriptorish {
+	return d
+}
+
+func (d poorFileDescriptorish) GetFullyQualifiedName() string {
+	return d.FileDescriptorProto.GetName()
+}
+
+func (d poorFileDescriptorish) AsProto() proto.Message {
+	return d.FileDescriptorProto
+}
+
+func (d poorFileDescriptorish) GetFileOptions() *dpb.FileOptions {
+	return d.FileDescriptorProto.GetOptions()
+}
+
+func (d poorFileDescriptorish) FindSymbol(name string) desc.Descriptor {
+	return nil
+}
+
+func (d poorFileDescriptorish) GetPublicDependencies() []fileDescriptorish {
+	return nil
+}
+
+func (d poorFileDescriptorish) GetDependencies() []fileDescriptorish {
+	return nil
+}
+
+func (d poorFileDescriptorish) GetMessageTypes() []msgDescriptorish {
+	msgs := d.FileDescriptorProto.GetMessageType()
+	pkg := d.FileDescriptorProto.GetPackage()
+	ret := make([]msgDescriptorish, len(msgs))
+	for i, m := range msgs {
+		ret[i] = poorMsgDescriptorish{
+			DescriptorProto: m,
+			qual:            pkg,
+			file:            d,
+		}
+	}
+	return ret
+}
+
+func (d poorFileDescriptorish) GetExtensions() []fldDescriptorish {
+	exts := d.FileDescriptorProto.GetExtension()
+	pkg := d.FileDescriptorProto.GetPackage()
+	ret := make([]fldDescriptorish, len(exts))
+	for i, e := range exts {
+		ret[i] = poorFldDescriptorish{
+			FieldDescriptorProto: e,
+			qual:                 pkg,
+			file:                 d,
+		}
+	}
+	return ret
+}
+
+func (d poorFileDescriptorish) GetEnumTypes() []enumDescriptorish {
+	ens := d.FileDescriptorProto.GetEnumType()
+	pkg := d.FileDescriptorProto.GetPackage()
+	ret := make([]enumDescriptorish, len(ens))
+	for i, e := range ens {
+		ret[i] = poorEnumDescriptorish{
+			EnumDescriptorProto: e,
+			qual:                pkg,
+			file:                d,
+		}
+	}
+	return ret
+}
+
+func (d poorFileDescriptorish) GetServices() []svcDescriptorish {
+	svcs := d.FileDescriptorProto.GetService()
+	pkg := d.FileDescriptorProto.GetPackage()
+	ret := make([]svcDescriptorish, len(svcs))
+	for i, s := range svcs {
+		ret[i] = poorSvcDescriptorish{
+			ServiceDescriptorProto: s,
+			qual:                   pkg,
+			file:                   d,
+		}
+	}
+	return ret
+}
+
// poorMsgDescriptorish adapts an unlinked *dpb.DescriptorProto.
type poorMsgDescriptorish struct {
	*dpb.DescriptorProto
	qual string             // fully-qualified name of the enclosing scope (package or message)
	file fileDescriptorish  // file in which this message is declared
}

func (d poorMsgDescriptorish) GetFile() fileDescriptorish {
	return d.file
}

func (d poorMsgDescriptorish) GetFullyQualifiedName() string {
	return qualify(d.qual, d.DescriptorProto.GetName())
}
+
// qualify returns name prefixed by qual and a dot separator. If qual is
// empty, name is returned unchanged.
func qualify(qual, name string) string {
	if qual == "" {
		return name
	}
	// Plain concatenation: cheaper than fmt.Sprintf (no reflection/boxing)
	// and this runs for every element's fully-qualified name.
	return qual + "." + name
}
+
+func (d poorMsgDescriptorish) AsProto() proto.Message {
+	return d.DescriptorProto
+}
+
+func (d poorMsgDescriptorish) GetMessageOptions() *dpb.MessageOptions {
+	return d.DescriptorProto.GetOptions()
+}
+
+func (d poorMsgDescriptorish) GetFields() []fldDescriptorish {
+	flds := d.DescriptorProto.GetField()
+	ret := make([]fldDescriptorish, len(flds))
+	for i, f := range flds {
+		ret[i] = poorFldDescriptorish{
+			FieldDescriptorProto: f,
+			qual:                 d.GetFullyQualifiedName(),
+			file:                 d.file,
+		}
+	}
+	return ret
+}
+
+func (d poorMsgDescriptorish) GetOneOfs() []oneofDescriptorish {
+	oos := d.DescriptorProto.GetOneofDecl()
+	ret := make([]oneofDescriptorish, len(oos))
+	for i, oo := range oos {
+		ret[i] = poorOneOfDescriptorish{
+			OneofDescriptorProto: oo,
+			qual:                 d.GetFullyQualifiedName(),
+			file:                 d.file,
+		}
+	}
+	return ret
+}
+
+func (d poorMsgDescriptorish) GetExtensionRanges() []extRangeDescriptorish {
+	mdFqn := d.GetFullyQualifiedName()
+	extrs := d.DescriptorProto.GetExtensionRange()
+	ret := make([]extRangeDescriptorish, len(extrs))
+	for i, extr := range extrs {
+		ret[i] = extRangeDescriptorish{
+			er:   extr,
+			qual: mdFqn,
+			file: d.file,
+		}
+	}
+	return ret
+}
+
+func (d poorMsgDescriptorish) GetNestedMessageTypes() []msgDescriptorish {
+	msgs := d.DescriptorProto.GetNestedType()
+	ret := make([]msgDescriptorish, len(msgs))
+	for i, m := range msgs {
+		ret[i] = poorMsgDescriptorish{
+			DescriptorProto: m,
+			qual:            d.GetFullyQualifiedName(),
+			file:            d.file,
+		}
+	}
+	return ret
+}
+
+func (d poorMsgDescriptorish) GetNestedExtensions() []fldDescriptorish {
+	flds := d.DescriptorProto.GetExtension()
+	ret := make([]fldDescriptorish, len(flds))
+	for i, f := range flds {
+		ret[i] = poorFldDescriptorish{
+			FieldDescriptorProto: f,
+			qual:                 d.GetFullyQualifiedName(),
+			file:                 d.file,
+		}
+	}
+	return ret
+}
+
+func (d poorMsgDescriptorish) GetNestedEnumTypes() []enumDescriptorish {
+	ens := d.DescriptorProto.GetEnumType()
+	ret := make([]enumDescriptorish, len(ens))
+	for i, en := range ens {
+		ret[i] = poorEnumDescriptorish{
+			EnumDescriptorProto: en,
+			qual:                d.GetFullyQualifiedName(),
+			file:                d.file,
+		}
+	}
+	return ret
+}
+
+type poorFldDescriptorish struct {
+	*dpb.FieldDescriptorProto
+	qual string
+	file fileDescriptorish
+}
+
+func (d poorFldDescriptorish) GetFile() fileDescriptorish {
+	return d.file
+}
+
+func (d poorFldDescriptorish) GetFullyQualifiedName() string {
+	return qualify(d.qual, d.FieldDescriptorProto.GetName())
+}
+
+func (d poorFldDescriptorish) AsProto() proto.Message {
+	return d.FieldDescriptorProto
+}
+
+func (d poorFldDescriptorish) GetFieldOptions() *dpb.FieldOptions {
+	return d.FieldDescriptorProto.GetOptions()
+}
+
+func (d poorFldDescriptorish) GetMessageType() *desc.MessageDescriptor {
+	return nil
+}
+
+func (d poorFldDescriptorish) GetEnumType() *desc.EnumDescriptor {
+	return nil
+}
+
+type poorOneOfDescriptorish struct {
+	*dpb.OneofDescriptorProto
+	qual string
+	file fileDescriptorish
+}
+
+func (d poorOneOfDescriptorish) GetFile() fileDescriptorish {
+	return d.file
+}
+
+func (d poorOneOfDescriptorish) GetFullyQualifiedName() string {
+	return qualify(d.qual, d.OneofDescriptorProto.GetName())
+}
+
+func (d poorOneOfDescriptorish) AsProto() proto.Message {
+	return d.OneofDescriptorProto
+}
+
+func (d poorOneOfDescriptorish) GetOneOfOptions() *dpb.OneofOptions {
+	return d.OneofDescriptorProto.GetOptions()
+}
+
+func (d poorFldDescriptorish) AsFieldDescriptorProto() *dpb.FieldDescriptorProto {
+	return d.FieldDescriptorProto
+}
+
+type poorEnumDescriptorish struct {
+	*dpb.EnumDescriptorProto
+	qual string
+	file fileDescriptorish
+}
+
+func (d poorEnumDescriptorish) GetFile() fileDescriptorish {
+	return d.file
+}
+
+func (d poorEnumDescriptorish) GetFullyQualifiedName() string {
+	return qualify(d.qual, d.EnumDescriptorProto.GetName())
+}
+
+func (d poorEnumDescriptorish) AsProto() proto.Message {
+	return d.EnumDescriptorProto
+}
+
+func (d poorEnumDescriptorish) GetEnumOptions() *dpb.EnumOptions {
+	return d.EnumDescriptorProto.GetOptions()
+}
+
+func (d poorEnumDescriptorish) GetValues() []enumValDescriptorish {
+	vals := d.EnumDescriptorProto.GetValue()
+	ret := make([]enumValDescriptorish, len(vals))
+	for i, v := range vals {
+		ret[i] = poorEnumValDescriptorish{
+			EnumValueDescriptorProto: v,
+			qual:                     d.GetFullyQualifiedName(),
+			file:                     d.file,
+		}
+	}
+	return ret
+}
+
+type poorEnumValDescriptorish struct {
+	*dpb.EnumValueDescriptorProto
+	qual string
+	file fileDescriptorish
+}
+
+func (d poorEnumValDescriptorish) GetFile() fileDescriptorish {
+	return d.file
+}
+
+func (d poorEnumValDescriptorish) GetFullyQualifiedName() string {
+	return qualify(d.qual, d.EnumValueDescriptorProto.GetName())
+}
+
+func (d poorEnumValDescriptorish) AsProto() proto.Message {
+	return d.EnumValueDescriptorProto
+}
+
+func (d poorEnumValDescriptorish) GetEnumValueOptions() *dpb.EnumValueOptions {
+	return d.EnumValueDescriptorProto.GetOptions()
+}
+
+type poorSvcDescriptorish struct {
+	*dpb.ServiceDescriptorProto
+	qual string
+	file fileDescriptorish
+}
+
+func (d poorSvcDescriptorish) GetFile() fileDescriptorish {
+	return d.file
+}
+
+func (d poorSvcDescriptorish) GetFullyQualifiedName() string {
+	return qualify(d.qual, d.ServiceDescriptorProto.GetName())
+}
+
+func (d poorSvcDescriptorish) AsProto() proto.Message {
+	return d.ServiceDescriptorProto
+}
+
+func (d poorSvcDescriptorish) GetServiceOptions() *dpb.ServiceOptions {
+	return d.ServiceDescriptorProto.GetOptions()
+}
+
+func (d poorSvcDescriptorish) GetMethods() []methodDescriptorish {
+	mtds := d.ServiceDescriptorProto.GetMethod()
+	ret := make([]methodDescriptorish, len(mtds))
+	for i, m := range mtds {
+		ret[i] = poorMethodDescriptorish{
+			MethodDescriptorProto: m,
+			qual:                  d.GetFullyQualifiedName(),
+			file:                  d.file,
+		}
+	}
+	return ret
+}
+
+type poorMethodDescriptorish struct {
+	*dpb.MethodDescriptorProto
+	qual string
+	file fileDescriptorish
+}
+
+func (d poorMethodDescriptorish) GetFile() fileDescriptorish {
+	return d.file
+}
+
+func (d poorMethodDescriptorish) GetFullyQualifiedName() string {
+	return qualify(d.qual, d.MethodDescriptorProto.GetName())
+}
+
+func (d poorMethodDescriptorish) AsProto() proto.Message {
+	return d.MethodDescriptorProto
+}
+
+func (d poorMethodDescriptorish) GetMethodOptions() *dpb.MethodOptions {
+	return d.MethodDescriptorProto.GetOptions()
+}
+
// extRangeDescriptorish wraps a message's extension range declaration so
// that its options can be interpreted like any other element's options.
type extRangeDescriptorish struct {
	er   *dpb.DescriptorProto_ExtensionRange // the range declaration itself
	qual string                              // fully-qualified name of the enclosing message
	file fileDescriptorish                   // file that declares the enclosing message
}

func (er extRangeDescriptorish) GetFile() fileDescriptorish {
	return er.file
}

// GetFullyQualifiedName synthesizes a name of the form
// "Enclosing.Message.start-end". The -1 converts the range's exclusive
// end (as stored in descriptor protos) into an inclusive bound for display.
func (er extRangeDescriptorish) GetFullyQualifiedName() string {
	return qualify(er.qual, fmt.Sprintf("%d-%d", er.er.GetStart(), er.er.GetEnd()-1))
}

func (er extRangeDescriptorish) AsProto() proto.Message {
	return er.er
}

func (er extRangeDescriptorish) GetExtensionRangeOptions() *dpb.ExtensionRangeOptions {
	return er.er.GetOptions()
}
+
// interpretFileOptions interprets the uninterpreted options on the given
// file and on every element it contains (messages, extensions, enums,
// services, and methods), replacing each element's UninterpretedOption
// list with whatever could not be interpreted (empty unless lenient).
func interpretFileOptions(r *parseResult, fd fileDescriptorish) error {
	opts := fd.GetFileOptions()
	if opts != nil {
		if len(opts.UninterpretedOption) > 0 {
			if remain, err := interpretOptions(r, fd, opts, opts.UninterpretedOption); err != nil {
				return err
			} else {
				opts.UninterpretedOption = remain
			}
		}
	}
	// Recurse into each kind of top-level element.
	for _, md := range fd.GetMessageTypes() {
		if err := interpretMessageOptions(r, md); err != nil {
			return err
		}
	}
	for _, fld := range fd.GetExtensions() {
		if err := interpretFieldOptions(r, fld); err != nil {
			return err
		}
	}
	for _, ed := range fd.GetEnumTypes() {
		if err := interpretEnumOptions(r, ed); err != nil {
			return err
		}
	}
	for _, sd := range fd.GetServices() {
		opts := sd.GetServiceOptions()
		if opts != nil {
			if len(opts.UninterpretedOption) > 0 {
				if remain, err := interpretOptions(r, sd, opts, opts.UninterpretedOption); err != nil {
					return err
				} else {
					opts.UninterpretedOption = remain
				}
			}
		}
		for _, mtd := range sd.GetMethods() {
			opts := mtd.GetMethodOptions()
			if opts != nil {
				if len(opts.UninterpretedOption) > 0 {
					if remain, err := interpretOptions(r, mtd, opts, opts.UninterpretedOption); err != nil {
						return err
					} else {
						opts.UninterpretedOption = remain
					}
				}
			}
		}
	}
	return nil
}
+
// interpretMessageOptions interprets the uninterpreted options on the given
// message and on all of its contents: fields, oneofs, nested extensions,
// extension ranges, and (recursively) nested messages and enums.
func interpretMessageOptions(r *parseResult, md msgDescriptorish) error {
	opts := md.GetMessageOptions()
	if opts != nil {
		if len(opts.UninterpretedOption) > 0 {
			if remain, err := interpretOptions(r, md, opts, opts.UninterpretedOption); err != nil {
				return err
			} else {
				opts.UninterpretedOption = remain
			}
		}
	}
	for _, fld := range md.GetFields() {
		if err := interpretFieldOptions(r, fld); err != nil {
			return err
		}
	}
	for _, ood := range md.GetOneOfs() {
		opts := ood.GetOneOfOptions()
		if opts != nil {
			if len(opts.UninterpretedOption) > 0 {
				if remain, err := interpretOptions(r, ood, opts, opts.UninterpretedOption); err != nil {
					return err
				} else {
					opts.UninterpretedOption = remain
				}
			}
		}
	}
	for _, fld := range md.GetNestedExtensions() {
		if err := interpretFieldOptions(r, fld); err != nil {
			return err
		}
	}
	for _, er := range md.GetExtensionRanges() {
		opts := er.GetExtensionRangeOptions()
		if opts != nil {
			if len(opts.UninterpretedOption) > 0 {
				if remain, err := interpretOptions(r, er, opts, opts.UninterpretedOption); err != nil {
					return err
				} else {
					opts.UninterpretedOption = remain
				}
			}
		}
	}
	// Recurse into nested types.
	for _, nmd := range md.GetNestedMessageTypes() {
		if err := interpretMessageOptions(r, nmd); err != nil {
			return err
		}
	}
	for _, ed := range md.GetNestedEnumTypes() {
		if err := interpretEnumOptions(r, ed); err != nil {
			return err
		}
	}
	return nil
}
+
// interpretFieldOptions interprets the uninterpreted options of a field.
// The pseudo-options "json_name" and "default" are handled specially here
// (they map to fields of the descriptor itself rather than to FieldOptions);
// the rest are handed to interpretOptions. If only pseudo-options were
// present, the field's Options message is cleared entirely.
func interpretFieldOptions(r *parseResult, fld fldDescriptorish) error {
	opts := fld.GetFieldOptions()
	if opts != nil {
		if len(opts.UninterpretedOption) > 0 {
			uo := opts.UninterpretedOption
			scope := fmt.Sprintf("field %s", fld.GetFullyQualifiedName())

			// process json_name pseudo-option
			if index, err := findOption(r, scope, uo, "json_name"); err != nil && !r.lenient {
				return err
			} else if err == nil && index >= 0 {
				opt := uo[index]
				optNode := r.getOptionNode(opt)

				// attribute source code info
				if on, ok := optNode.(*optionNode); ok {
					r.interpretedOptions[on] = []int32{-1, internal.Field_jsonNameTag}
				}
				uo = removeOption(uo, index)
				if opt.StringValue == nil {
					return ErrorWithSourcePos{Pos: optNode.getValue().start(), Underlying: fmt.Errorf("%s: expecting string value for json_name option", scope)}
				}
				fld.AsFieldDescriptorProto().JsonName = proto.String(string(opt.StringValue))
			}

			// and process default pseudo-option
			if index, err := processDefaultOption(r, scope, fld, uo); err != nil && !r.lenient {
				return err
			} else if err == nil && index >= 0 {
				// attribute source code info
				optNode := r.getOptionNode(uo[index])
				if on, ok := optNode.(*optionNode); ok {
					r.interpretedOptions[on] = []int32{-1, internal.Field_defaultTag}
				}
				uo = removeOption(uo, index)
			}

			if len(uo) == 0 {
				// no real options, only pseudo-options above? clear out options
				fld.AsFieldDescriptorProto().Options = nil
			} else if remain, err := interpretOptions(r, fld, opts, uo); err != nil {
				return err
			} else {
				opts.UninterpretedOption = remain
			}
		}
	}
	return nil
}
+
+func processDefaultOption(res *parseResult, scope string, fld fldDescriptorish, uos []*dpb.UninterpretedOption) (defaultIndex int, err error) {
+	found, err := findOption(res, scope, uos, "default")
+	if err != nil {
+		return -1, err
+	} else if found == -1 {
+		return -1, nil
+	}
+	opt := uos[found]
+	optNode := res.getOptionNode(opt)
+	fdp := fld.AsFieldDescriptorProto()
+	if fdp.GetLabel() == dpb.FieldDescriptorProto_LABEL_REPEATED {
+		return -1, ErrorWithSourcePos{Pos: optNode.getName().start(), Underlying: fmt.Errorf("%s: default value cannot be set because field is repeated", scope)}
+	}
+	if fdp.GetType() == dpb.FieldDescriptorProto_TYPE_GROUP || fdp.GetType() == dpb.FieldDescriptorProto_TYPE_MESSAGE {
+		return -1, ErrorWithSourcePos{Pos: optNode.getName().start(), Underlying: fmt.Errorf("%s: default value cannot be set because field is a message", scope)}
+	}
+	val := optNode.getValue()
+	if _, ok := val.(*aggregateLiteralNode); ok {
+		return -1, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%s: default value cannot be an aggregate", scope)}
+	}
+	mc := &messageContext{
+		res:         res,
+		file:        fld.GetFile(),
+		elementName: fld.GetFullyQualifiedName(),
+		elementType: descriptorType(fld.AsProto()),
+		option:      opt,
+	}
+	v, err := fieldValue(res, mc, fld, val, true)
+	if err != nil {
+		return -1, err
+	}
+	if str, ok := v.(string); ok {
+		fld.AsFieldDescriptorProto().DefaultValue = proto.String(str)
+	} else if b, ok := v.([]byte); ok {
+		fld.AsFieldDescriptorProto().DefaultValue = proto.String(encodeDefaultBytes(b))
+	} else {
+		var flt float64
+		var ok bool
+		if flt, ok = v.(float64); !ok {
+			var flt32 float32
+			if flt32, ok = v.(float32); ok {
+				flt = float64(flt32)
+			}
+		}
+		if ok {
+			if math.IsInf(flt, 1) {
+				fld.AsFieldDescriptorProto().DefaultValue = proto.String("inf")
+			} else if ok && math.IsInf(flt, -1) {
+				fld.AsFieldDescriptorProto().DefaultValue = proto.String("-inf")
+			} else if ok && math.IsNaN(flt) {
+				fld.AsFieldDescriptorProto().DefaultValue = proto.String("nan")
+			} else {
+				fld.AsFieldDescriptorProto().DefaultValue = proto.String(fmt.Sprintf("%v", v))
+			}
+		} else {
+			fld.AsFieldDescriptorProto().DefaultValue = proto.String(fmt.Sprintf("%v", v))
+		}
+	}
+	return found, nil
+}
+
// encodeDefaultBytes renders a bytes default value as an escaped string,
// suitable for storing in FieldDescriptorProto.DefaultValue.
func encodeDefaultBytes(b []byte) string {
	var buf bytes.Buffer
	writeEscapedBytes(&buf, b)
	return buf.String()
}
+
// interpretEnumOptions interprets the uninterpreted options on the given
// enum and on each of its values, replacing each UninterpretedOption list
// with whatever could not be interpreted (empty unless lenient).
func interpretEnumOptions(r *parseResult, ed enumDescriptorish) error {
	opts := ed.GetEnumOptions()
	if opts != nil {
		if len(opts.UninterpretedOption) > 0 {
			if remain, err := interpretOptions(r, ed, opts, opts.UninterpretedOption); err != nil {
				return err
			} else {
				opts.UninterpretedOption = remain
			}
		}
	}
	for _, evd := range ed.GetValues() {
		opts := evd.GetEnumValueOptions()
		if opts != nil {
			if len(opts.UninterpretedOption) > 0 {
				if remain, err := interpretOptions(r, evd, opts, opts.UninterpretedOption); err != nil {
					return err
				} else {
					opts.UninterpretedOption = remain
				}
			}
		}
	}
	return nil
}
+
// interpretOptions interprets the given uninterpreted options for element,
// merging the result into opts (the element's options message). It returns
// the options that could not be interpreted: always empty on success unless
// res.lenient is set, in which case problematic options are passed through
// unchanged instead of causing an error.
func interpretOptions(res *parseResult, element descriptorish, opts proto.Message, uninterpreted []*dpb.UninterpretedOption) ([]*dpb.UninterpretedOption, error) {
	optsd, err := desc.LoadMessageDescriptorForMessage(opts)
	if err != nil {
		if res.lenient {
			return uninterpreted, nil
		}
		return nil, err
	}
	// Work on a dynamic copy so extensions can be set reflectively.
	dm := dynamic.NewMessage(optsd)
	err = dm.ConvertFrom(opts)
	if err != nil {
		if res.lenient {
			return uninterpreted, nil
		}
		node := res.nodes[element.AsProto()]
		return nil, ErrorWithSourcePos{Pos: node.start(), Underlying: err}
	}

	mc := &messageContext{res: res, file: element.GetFile(), elementName: element.GetFullyQualifiedName(), elementType: descriptorType(element.AsProto())}
	var remain []*dpb.UninterpretedOption
	for _, uo := range uninterpreted {
		node := res.getOptionNode(uo)
		if !uo.Name[0].GetIsExtension() && uo.Name[0].GetNamePart() == "uninterpreted_option" {
			if res.lenient {
				remain = append(remain, uo)
				continue
			}
			// uninterpreted_option might be found reflectively, but is not actually valid for use
			return nil, ErrorWithSourcePos{Pos: node.getName().start(), Underlying: fmt.Errorf("%vinvalid option 'uninterpreted_option'", mc)}
		}
		mc.option = uo
		path, err := interpretField(res, mc, element, dm, uo, 0, nil)
		if err != nil {
			if res.lenient {
				remain = append(remain, uo)
				continue
			}
			return nil, err
		}
		if optn, ok := node.(*optionNode); ok {
			res.interpretedOptions[optn] = path
		}
	}

	if err := dm.ValidateRecursive(); err != nil {
		// if lenient, we'll let this pass, but it means that some required field was not set!
		// TODO: do this in a more granular way, so we can validate individual fields
		// and leave them uninterpreted, instead of just having to live with the
		// thing having invalid data in extensions.
		if !res.lenient {
			node := res.nodes[element.AsProto()]
			return nil, ErrorWithSourcePos{Pos: node.start(), Underlying: fmt.Errorf("error in %s options: %v", descriptorType(element.AsProto()), err)}
		}
	}

	if res.lenient {
		// If we're lenient, then we don't want to clobber the passed in message
		// and leave it partially populated. So we convert into a copy first
		optsClone := proto.Clone(opts)
		if err := dm.ConvertTo(optsClone); err != nil {
			// TODO: do this in a more granular way, so we can convert individual
			// fields and leave bad ones uninterpreted instead of skipping all of
			// the work we've done so far.
			return uninterpreted, nil
		}
		// conversion from dynamic message above worked, so now
		// it is safe to overwrite the passed in message
		opts.Reset()
		proto.Merge(opts, optsClone)

	} else {
		// not lenient: try to convert into the passed in message
		// and fail is not successful
		if err := dm.ConvertTo(opts); err != nil {
			node := res.nodes[element.AsProto()]
			return nil, ErrorWithSourcePos{Pos: node.start(), Underlying: err}
		}
	}

	return remain, nil
}
+
// interpretField resolves one component of an option name (opt.Name[nameIndex])
// against the dynamic options message dm, then either sets the option value
// (for the final component) or recurses into the nested message (for
// intermediate components). It returns the source-code-info path of the
// field that was ultimately set, built up via pathPrefix.
func interpretField(res *parseResult, mc *messageContext, element descriptorish, dm *dynamic.Message, opt *dpb.UninterpretedOption, nameIndex int, pathPrefix []int32) (path []int32, err error) {
	var fld *desc.FieldDescriptor
	nm := opt.GetName()[nameIndex]
	node := res.getOptionNamePartNode(nm)
	if nm.GetIsExtension() {
		// Extension names are resolved through the file's imports and must
		// extend the message currently being populated.
		extName := nm.GetNamePart()
		if extName[0] == '.' {
			extName = extName[1:] /* skip leading dot */
		}
		fld = findExtension(element.GetFile(), extName, false, map[fileDescriptorish]struct{}{})
		if fld == nil {
			return nil, ErrorWithSourcePos{
				Pos: node.start(),
				Underlying: fmt.Errorf("%vunrecognized extension %s of %s",
					mc, extName, dm.GetMessageDescriptor().GetFullyQualifiedName()),
			}
		}
		if fld.GetOwner().GetFullyQualifiedName() != dm.GetMessageDescriptor().GetFullyQualifiedName() {
			return nil, ErrorWithSourcePos{
				Pos: node.start(),
				Underlying: fmt.Errorf("%vextension %s should extend %s but instead extends %s",
					mc, extName, dm.GetMessageDescriptor().GetFullyQualifiedName(), fld.GetOwner().GetFullyQualifiedName()),
			}
		}
	} else {
		fld = dm.GetMessageDescriptor().FindFieldByName(nm.GetNamePart())
		if fld == nil {
			return nil, ErrorWithSourcePos{
				Pos: node.start(),
				Underlying: fmt.Errorf("%vfield %s of %s does not exist",
					mc, nm.GetNamePart(), dm.GetMessageDescriptor().GetFullyQualifiedName()),
			}
		}
	}

	path = append(pathPrefix, fld.GetNumber())

	if len(opt.GetName()) > nameIndex+1 {
		// More name components follow, so this component must be a singular
		// message-typed field we can descend into.
		nextnm := opt.GetName()[nameIndex+1]
		nextnode := res.getOptionNamePartNode(nextnm)
		if fld.GetType() != dpb.FieldDescriptorProto_TYPE_MESSAGE {
			return nil, ErrorWithSourcePos{
				Pos: nextnode.start(),
				Underlying: fmt.Errorf("%vcannot set field %s because %s is not a message",
					mc, nextnm.GetNamePart(), nm.GetNamePart()),
			}
		}
		if fld.IsRepeated() {
			return nil, ErrorWithSourcePos{
				Pos: nextnode.start(),
				Underlying: fmt.Errorf("%vcannot set field %s because %s is repeated (must use an aggregate)",
					mc, nextnm.GetNamePart(), nm.GetNamePart()),
			}
		}
		// Reuse the existing nested message if present; otherwise create
		// and attach a fresh one.
		var fdm *dynamic.Message
		var err error
		if dm.HasField(fld) {
			var v interface{}
			v, err = dm.TryGetField(fld)
			fdm, _ = v.(*dynamic.Message)
		} else {
			fdm = dynamic.NewMessage(fld.GetMessageType())
			err = dm.TrySetField(fld, fdm)
		}
		if err != nil {
			return nil, ErrorWithSourcePos{Pos: node.start(), Underlying: err}
		}
		// recurse to set next part of name
		return interpretField(res, mc, element, fdm, opt, nameIndex+1, path)
	}

	optNode := res.getOptionNode(opt)
	if err := setOptionField(res, mc, dm, fld, node, optNode.getValue()); err != nil {
		return nil, err
	}
	if fld.IsRepeated() {
		// For repeated fields the path also identifies the element index
		// that was just appended.
		path = append(path, int32(dm.FieldLength(fld))-1)
	}
	return path, nil
}
+
+func findExtension(fd fileDescriptorish, name string, public bool, checked map[fileDescriptorish]struct{}) *desc.FieldDescriptor {
+	if _, ok := checked[fd]; ok {
+		return nil
+	}
+	checked[fd] = struct{}{}
+	d := fd.FindSymbol(name)
+	if d != nil {
+		if fld, ok := d.(*desc.FieldDescriptor); ok {
+			return fld
+		}
+		return nil
+	}
+
+	// When public = false, we are searching only directly imported symbols. But we
+	// also need to search transitive public imports due to semantics of public imports.
+	if public {
+		for _, dep := range fd.GetPublicDependencies() {
+			d := findExtension(dep, name, true, checked)
+			if d != nil {
+				return d
+			}
+		}
+	} else {
+		for _, dep := range fd.GetDependencies() {
+			d := findExtension(dep, name, true, checked)
+			if d != nil {
+				return d
+			}
+		}
+	}
+	return nil
+}
+
+func setOptionField(res *parseResult, mc *messageContext, dm *dynamic.Message, fld *desc.FieldDescriptor, name node, val valueNode) error {
+	v := val.value()
+	if sl, ok := v.([]valueNode); ok {
+		// handle slices a little differently than the others
+		if !fld.IsRepeated() {
+			return ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue is an array but field is not repeated", mc)}
+		}
+		origPath := mc.optAggPath
+		defer func() {
+			mc.optAggPath = origPath
+		}()
+		for index, item := range sl {
+			mc.optAggPath = fmt.Sprintf("%s[%d]", origPath, index)
+			if v, err := fieldValue(res, mc, richFldDescriptorish{FieldDescriptor: fld}, item, false); err != nil {
+				return err
+			} else if err = dm.TryAddRepeatedField(fld, v); err != nil {
+				return ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%verror setting value: %s", mc, err)}
+			}
+		}
+		return nil
+	}
+
+	v, err := fieldValue(res, mc, richFldDescriptorish{FieldDescriptor: fld}, val, false)
+	if err != nil {
+		return err
+	}
+	if fld.IsRepeated() {
+		err = dm.TryAddRepeatedField(fld, v)
+	} else {
+		if dm.HasField(fld) {
+			return ErrorWithSourcePos{Pos: name.start(), Underlying: fmt.Errorf("%vnon-repeated option field %s already set", mc, fieldName(fld))}
+		}
+		err = dm.TrySetField(fld, v)
+	}
+	if err != nil {
+		return ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%verror setting value: %s", mc, err)}
+	}
+
+	return nil
+}
+
// messageContext identifies the element (and, within it, the option and the
// path inside an aggregate option value) currently being interpreted, so that
// error messages can describe exactly where a problem occurred. Its String
// method renders the context as an error-message prefix.
type messageContext struct {
	res         *parseResult
	file        fileDescriptorish
	elementType string // kind of element; String omits the prefix for "file"
	elementName string
	option      *dpb.UninterpretedOption
	optAggPath  string // path within an aggregate value, e.g. "a.b[0]"
}
+
+func (c *messageContext) String() string {
+	var ctx bytes.Buffer
+	if c.elementType != "file" {
+		fmt.Fprintf(&ctx, "%s %s: ", c.elementType, c.elementName)
+	}
+	if c.option != nil && c.option.Name != nil {
+		ctx.WriteString("option ")
+		writeOptionName(&ctx, c.option.Name)
+		if c.res.nodes == nil {
+			// if we have no source position info, try to provide as much context
+			// as possible (if nodes != nil, we don't need this because any errors
+			// will actually have file and line numbers)
+			if c.optAggPath != "" {
+				fmt.Fprintf(&ctx, " at %s", c.optAggPath)
+			}
+		}
+		ctx.WriteString(": ")
+	}
+	return ctx.String()
+}
+
+func writeOptionName(buf *bytes.Buffer, parts []*dpb.UninterpretedOption_NamePart) {
+	first := true
+	for _, p := range parts {
+		if first {
+			first = false
+		} else {
+			buf.WriteByte('.')
+		}
+		nm := p.GetNamePart()
+		if nm[0] == '.' {
+			// skip leading dot
+			nm = nm[1:]
+		}
+		if p.GetIsExtension() {
+			buf.WriteByte('(')
+			buf.WriteString(nm)
+			buf.WriteByte(')')
+		} else {
+			buf.WriteString(nm)
+		}
+	}
+}
+
+func fieldName(fld *desc.FieldDescriptor) string {
+	if fld.IsExtension() {
+		return fld.GetFullyQualifiedName()
+	} else {
+		return fld.GetName()
+	}
+}
+
+func valueKind(val interface{}) string {
+	switch val := val.(type) {
+	case identifier:
+		return "identifier"
+	case bool:
+		return "bool"
+	case int64:
+		if val < 0 {
+			return "negative integer"
+		}
+		return "integer"
+	case uint64:
+		return "integer"
+	case float64:
+		return "double"
+	case string, []byte:
+		return "string"
+	case []*aggregateEntryNode:
+		return "message"
+	default:
+		return fmt.Sprintf("%T", val)
+	}
+}
+
// fieldValue converts the AST value val into a Go value suitable for setting
// field fld on a dynamic message: int32/uint32/int64/uint64/float32/float64/
// bool/string/[]byte for scalar fields and *dynamic.Message for message and
// group fields. For enum fields, enumAsString selects whether the enum
// value's name (string) or number (int32) is returned. Type mismatches and
// out-of-range values are reported as ErrorWithSourcePos, prefixed with the
// message context mc.
func fieldValue(res *parseResult, mc *messageContext, fld fldDescriptorish, val valueNode, enumAsString bool) (interface{}, error) {
	v := val.value()
	t := fld.AsFieldDescriptorProto().GetType()
	switch t {
	case dpb.FieldDescriptorProto_TYPE_ENUM:
		// enum values must be written as identifiers naming a value of the enum
		if id, ok := v.(identifier); ok {
			ev := fld.GetEnumType().FindValueByName(string(id))
			if ev == nil {
				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%venum %s has no value named %s", mc, fld.GetEnumType().GetFullyQualifiedName(), id)}
			}
			if enumAsString {
				return ev.GetName(), nil
			} else {
				return ev.GetNumber(), nil
			}
		}
		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting enum, got %s", mc, valueKind(v))}
	case dpb.FieldDescriptorProto_TYPE_MESSAGE, dpb.FieldDescriptorProto_TYPE_GROUP:
		// message values must be aggregate literals; build a dynamic message by
		// resolving each entry's field and setting it via setOptionField
		if aggs, ok := v.([]*aggregateEntryNode); ok {
			fmd := fld.GetMessageType()
			fdm := dynamic.NewMessage(fmd)
			origPath := mc.optAggPath
			defer func() {
				mc.optAggPath = origPath
			}()
			for _, a := range aggs {
				if origPath == "" {
					mc.optAggPath = a.name.value()
				} else {
					mc.optAggPath = origPath + "." + a.name.value()
				}
				var ffld *desc.FieldDescriptor
				if a.name.isExtension {
					n := a.name.name.val
					ffld = findExtension(mc.file, n, false, map[fileDescriptorish]struct{}{})
					if ffld == nil {
						// may need to qualify with package name
						pkg := mc.file.GetPackage()
						if pkg != "" {
							ffld = findExtension(mc.file, pkg+"."+n, false, map[fileDescriptorish]struct{}{})
						}
					}
				} else {
					ffld = fmd.FindFieldByName(a.name.value())
				}
				if ffld == nil {
					return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vfield %s not found", mc, a.name.name.val)}
				}
				if err := setOptionField(res, mc, fdm, ffld, a.name, a.val); err != nil {
					return nil, err
				}
			}
			return fdm, nil
		}
		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting message, got %s", mc, valueKind(v))}
	case dpb.FieldDescriptorProto_TYPE_BOOL:
		if b, ok := v.(bool); ok {
			return b, nil
		}
		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting bool, got %s", mc, valueKind(v))}
	case dpb.FieldDescriptorProto_TYPE_BYTES:
		if str, ok := v.(string); ok {
			return []byte(str), nil
		}
		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting bytes, got %s", mc, valueKind(v))}
	case dpb.FieldDescriptorProto_TYPE_STRING:
		if str, ok := v.(string); ok {
			return str, nil
		}
		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting string, got %s", mc, valueKind(v))}
	// For all integer types below: a literal may arrive as either int64 or
	// uint64, so both representations are accepted, with range checks.
	case dpb.FieldDescriptorProto_TYPE_INT32, dpb.FieldDescriptorProto_TYPE_SINT32, dpb.FieldDescriptorProto_TYPE_SFIXED32:
		if i, ok := v.(int64); ok {
			if i > math.MaxInt32 || i < math.MinInt32 {
				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %d is out of range for int32", mc, i)}
			}
			return int32(i), nil
		}
		if ui, ok := v.(uint64); ok {
			if ui > math.MaxInt32 {
				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %d is out of range for int32", mc, ui)}
			}
			return int32(ui), nil
		}
		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting int32, got %s", mc, valueKind(v))}
	case dpb.FieldDescriptorProto_TYPE_UINT32, dpb.FieldDescriptorProto_TYPE_FIXED32:
		if i, ok := v.(int64); ok {
			if i > math.MaxUint32 || i < 0 {
				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %d is out of range for uint32", mc, i)}
			}
			return uint32(i), nil
		}
		if ui, ok := v.(uint64); ok {
			if ui > math.MaxUint32 {
				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %d is out of range for uint32", mc, ui)}
			}
			return uint32(ui), nil
		}
		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting uint32, got %s", mc, valueKind(v))}
	case dpb.FieldDescriptorProto_TYPE_INT64, dpb.FieldDescriptorProto_TYPE_SINT64, dpb.FieldDescriptorProto_TYPE_SFIXED64:
		if i, ok := v.(int64); ok {
			return i, nil
		}
		if ui, ok := v.(uint64); ok {
			if ui > math.MaxInt64 {
				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %d is out of range for int64", mc, ui)}
			}
			return int64(ui), nil
		}
		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting int64, got %s", mc, valueKind(v))}
	case dpb.FieldDescriptorProto_TYPE_UINT64, dpb.FieldDescriptorProto_TYPE_FIXED64:
		if i, ok := v.(int64); ok {
			if i < 0 {
				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %d is out of range for uint64", mc, i)}
			}
			return uint64(i), nil
		}
		if ui, ok := v.(uint64); ok {
			return ui, nil
		}
		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting uint64, got %s", mc, valueKind(v))}
	case dpb.FieldDescriptorProto_TYPE_DOUBLE:
		// integer literals are accepted for floating-point fields
		if d, ok := v.(float64); ok {
			return d, nil
		}
		if i, ok := v.(int64); ok {
			return float64(i), nil
		}
		if u, ok := v.(uint64); ok {
			return float64(u), nil
		}
		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting double, got %s", mc, valueKind(v))}
	case dpb.FieldDescriptorProto_TYPE_FLOAT:
		if d, ok := v.(float64); ok {
			// reject finite values outside float32 range, but pass through
			// +/-Inf and NaN unchanged
			if (d > math.MaxFloat32 || d < -math.MaxFloat32) && !math.IsInf(d, 1) && !math.IsInf(d, -1) && !math.IsNaN(d) {
				return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vvalue %f is out of range for float", mc, d)}
			}
			return float32(d), nil
		}
		if i, ok := v.(int64); ok {
			return float32(i), nil
		}
		if u, ok := v.(uint64); ok {
			return float32(u), nil
		}
		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vexpecting float, got %s", mc, valueKind(v))}
	default:
		return nil, ErrorWithSourcePos{Pos: val.start(), Underlying: fmt.Errorf("%vunrecognized field type: %s", mc, t)}
	}
}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/parser.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/parser.go
new file mode 100644
index 0000000..ce9a3e4
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/parser.go
@@ -0,0 +1,1520 @@
+package protoparse
+
+import (
+	"bytes"
+	"errors"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"math"
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	"github.com/golang/protobuf/proto"
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+
+	"github.com/jhump/protoreflect/desc"
+	"github.com/jhump/protoreflect/desc/internal"
+)
+
//go:generate goyacc -o proto.y.go -p proto proto.y

// errNoImportPathsForAbsoluteFilePath is returned by ResolveFilenames when
// absolute file paths are supplied but no import paths are given to resolve
// them against.
var errNoImportPathsForAbsoluteFilePath = errors.New("must specify at least one import path if any absolute file paths are given")
+
// init enables verbose parser errors and rewrites the goyacc-generated
// token-name table so that syntax errors use friendly names (e.g. "string
// literal") instead of the raw token identifiers.
func init() {
	protoErrorVerbose = true

	// fix up the generated "token name" array so that error messages are nicer
	setTokenName(_STRING_LIT, "string literal")
	setTokenName(_INT_LIT, "int literal")
	setTokenName(_FLOAT_LIT, "float literal")
	setTokenName(_NAME, "identifier")
	setTokenName(_FQNAME, "fully-qualified name")
	setTokenName(_TYPENAME, "type name")
	setTokenName(_ERROR, "error")
	// for keywords, just show the keyword itself wrapped in quotes
	for str, i := range keywords {
		setTokenName(i, fmt.Sprintf(`"%s"`, str))
	}
}
+
// setTokenName replaces the display text for the given lexer token value in
// the generated parser's token-name table (protoToknames), which is used when
// formatting syntax errors. It panics if the token value cannot be translated
// to an entry in that table.
func setTokenName(token int, text string) {
	// NB: this is based on logic in generated parse code that translates the
	// int returned from the lexer into an internal token number.
	var intern int
	if token < len(protoTok1) {
		intern = protoTok1[token]
	} else {
		if token >= protoPrivate {
			if token < protoPrivate+len(protoTok2) {
				intern = protoTok2[token-protoPrivate]
			}
		}
		if intern == 0 {
			// fall back to scanning the (token, internal) pairs in protoTok3
			for i := 0; i+1 < len(protoTok3); i += 2 {
				if protoTok3[i] == token {
					intern = protoTok3[i+1]
					break
				}
			}
		}
	}

	if intern >= 1 && intern-1 < len(protoToknames) {
		protoToknames[intern-1] = text
		return
	}

	panic(fmt.Sprintf("Unknown token value: %d", token))
}
+
// FileAccessor is an abstraction for opening proto source files. It takes the
// name of the file to open and returns either the input reader or an error.
type FileAccessor func(filename string) (io.ReadCloser, error)

// FileContentsFromMap returns a FileAccessor backed by the given map of file
// contents, allowing proto sources to be constructed entirely in memory. Map
// keys are the proto file paths; values are the corresponding source text.
// Accessing a path not present in the map yields os.ErrNotExist.
func FileContentsFromMap(files map[string]string) FileAccessor {
	return func(filename string) (io.ReadCloser, error) {
		if contents, ok := files[filename]; ok {
			return ioutil.NopCloser(strings.NewReader(contents)), nil
		}
		return nil, os.ErrNotExist
	}
}
+
+// ResolveFilenames tries to resolve fileNames into paths that are relative to
+// directories in the given importPaths. The returned slice has the results in
+// the same order as they are supplied in fileNames.
+//
+// The resulting names should be suitable for passing to Parser.ParseFiles.
+//
+// If importPaths is empty and any path is absolute, this returns error.
+// If importPaths is empty and all paths are relative, this returns the original fileNames.
+func ResolveFilenames(importPaths []string, fileNames ...string) ([]string, error) {
+	if len(importPaths) == 0 {
+		if containsAbsFilePath(fileNames) {
+			// We have to do this as otherwise parseProtoFiles can result in duplicate symbols.
+			// For example, assume we import "foo/bar/bar.proto" in a file "/home/alice/dev/foo/bar/baz.proto"
+			// as we call ParseFiles("/home/alice/dev/foo/bar/bar.proto","/home/alice/dev/foo/bar/baz.proto")
+			// with "/home/alice/dev" as our current directory. Due to the recursive nature of parseProtoFiles,
+			// it will discover the import "foo/bar/bar.proto" in the input file, and call parse on this,
+			// adding "foo/bar/bar.proto" to the parsed results, as well as "/home/alice/dev/foo/bar/bar.proto"
+			// from the input file list. This will result in a
+			// 'duplicate symbol SYMBOL: already defined as field in "/home/alice/dev/foo/bar/bar.proto'
+			// error being returned from ParseFiles.
+			return nil, errNoImportPathsForAbsoluteFilePath
+		}
+		return fileNames, nil
+	}
+	absImportPaths, err := absoluteFilePaths(importPaths)
+	if err != nil {
+		return nil, err
+	}
+	absFileNames, err := absoluteFilePaths(fileNames)
+	if err != nil {
+		return nil, err
+	}
+	resolvedFileNames := make([]string, 0, len(fileNames))
+	for _, absFileName := range absFileNames {
+		resolvedFileName, err := resolveAbsFilename(absImportPaths, absFileName)
+		if err != nil {
+			return nil, err
+		}
+		resolvedFileNames = append(resolvedFileNames, resolvedFileName)
+	}
+	return resolvedFileNames, nil
+}
+
// Parser parses proto source into descriptors.
type Parser struct {
	// The paths used to search for dependencies that are referenced in import
	// statements in proto source files. If no import paths are provided then
	// "." (current directory) is assumed to be the only import path.
	//
	// This setting is only used during ParseFiles operations. Since calls to
	// ParseFilesButDoNotLink do not link, there is no need to load and parse
	// dependencies.
	ImportPaths []string

	// If true, the supplied file names/paths need not necessarily match how the
	// files are referenced in import statements. The parser will attempt to
	// match import statements to supplied paths, "guessing" the import paths
	// for the files. Note that this inference is not perfect and link errors
	// could result. It works best when all proto files are organized such that
	// a single import path can be inferred (e.g. all files under a single tree
	// with import statements all being relative to the root of this tree).
	InferImportPaths bool

	// Used to create a reader for a given filename, when loading proto source
	// file contents. If unset, os.Open is used. If ImportPaths is also empty
	// then relative paths will be relative to the process's current working
	// directory.
	Accessor FileAccessor

	// If true, the resulting file descriptors will retain source code info,
	// that maps elements to their location in the source files as well as
	// includes comments found during parsing (and attributed to elements of
	// the source file).
	IncludeSourceCodeInfo bool

	// If true, the results from ParseFilesButDoNotLink will be passed through
	// some additional validations. But only constraints that do not require
	// linking can be checked. These include proto2 vs. proto3 language features,
	// looking for incorrect usage of reserved names or tags, and ensuring that
	// fields have unique tags and that enum values have unique numbers (unless
	// the enum allows aliases).
	ValidateUnlinkedFiles bool

	// If true, the results from ParseFilesButDoNotLink will have options
	// interpreted. Any uninterpretable options (including any custom options or
	// options that refer to message and enum types, which can only be
	// interpreted after linking) will be left in uninterpreted_options. Also,
	// the "default" pseudo-option for fields can only be interpreted for scalar
	// fields, excluding enums. (Interpreting default values for enum fields
	// requires resolving enum names, which requires linking.)
	InterpretOptionsInUnlinkedFiles bool
}
+
+// ParseFiles parses the named files into descriptors. The returned slice has
+// the same number of entries as the give filenames, in the same order. So the
+// first returned descriptor corresponds to the first given name, and so on.
+//
+// All dependencies for all specified files (including transitive dependencies)
+// must be accessible via the parser's Accessor or a link error will occur. The
+// exception to this rule is that files can import standard Google-provided
+// files -- e.g. google/protobuf/*.proto -- without needing to supply sources
+// for these files. Like protoc, this parser has a built-in version of these
+// files it can use if they aren't explicitly supplied.
+func (p Parser) ParseFiles(filenames ...string) ([]*desc.FileDescriptor, error) {
+	accessor := p.Accessor
+	if accessor == nil {
+		accessor = func(name string) (io.ReadCloser, error) {
+			return os.Open(name)
+		}
+	}
+	paths := p.ImportPaths
+	if len(paths) > 0 {
+		acc := accessor
+		accessor = func(name string) (io.ReadCloser, error) {
+			var ret error
+			for _, path := range paths {
+				f, err := acc(filepath.Join(path, name))
+				if err != nil {
+					if ret == nil {
+						ret = err
+					}
+					continue
+				}
+				return f, nil
+			}
+			return nil, ret
+		}
+	}
+
+	protos := map[string]*parseResult{}
+	err := parseProtoFiles(accessor, filenames, true, true, protos)
+	if err != nil {
+		return nil, err
+	}
+	if p.InferImportPaths {
+		protos = fixupFilenames(protos)
+	}
+	linkedProtos, err := newLinker(protos).linkFiles()
+	if err != nil {
+		return nil, err
+	}
+	if p.IncludeSourceCodeInfo {
+		for name, fd := range linkedProtos {
+			pr := protos[name]
+			fd.AsFileDescriptorProto().SourceCodeInfo = pr.generateSourceCodeInfo()
+			internal.RecomputeSourceInfo(fd)
+		}
+	}
+	fds := make([]*desc.FileDescriptor, len(filenames))
+	for i, name := range filenames {
+		fd := linkedProtos[name]
+		fds[i] = fd
+	}
+	return fds, nil
+}
+
+// ParseFilesButDoNotLink parses the named files into descriptor protos. The
+// results are just protos, not fully-linked descriptors. It is possible that
+// descriptors are invalid and still be returned in parsed form without error
+// due to the fact that the linking step is skipped (and thus many validation
+// steps omitted).
+//
+// There are a few side effects to not linking the descriptors:
+//   1. No options will be interpreted. Options can refer to extensions or have
+//      message and enum types. Without linking, these extension and type
+//      references are not resolved, so the options may not be interpretable.
+//      So all options will appear in UninterpretedOption fields of the various
+//      descriptor options messages.
+//   2. Type references will not be resolved. This means that the actual type
+//      names in the descriptors may be unqualified and even relative to the
+//      scope in which the type reference appears. This goes for fields that
+//      have message and enum types. It also applies to methods and their
+//      references to request and response message types.
+//   3. Enum fields are not known. Until a field's type reference is resolved
+//      (during linking), it is not known whether the type refers to a message
+//      or an enum. So all fields with such type references have their Type set
+//      to TYPE_MESSAGE.
+//
+// This method will still validate the syntax of parsed files. If the parser's
+// ValidateUnlinkedFiles field is true, additional checks, beyond syntax will
+// also be performed.
+func (p Parser) ParseFilesButDoNotLink(filenames ...string) ([]*dpb.FileDescriptorProto, error) {
+	accessor := p.Accessor
+	if accessor == nil {
+		accessor = func(name string) (io.ReadCloser, error) {
+			return os.Open(name)
+		}
+	}
+
+	protos := map[string]*parseResult{}
+	err := parseProtoFiles(accessor, filenames, false, p.ValidateUnlinkedFiles, protos)
+	if err != nil {
+		return nil, err
+	}
+	if p.InferImportPaths {
+		protos = fixupFilenames(protos)
+	}
+	fds := make([]*dpb.FileDescriptorProto, len(filenames))
+	for i, name := range filenames {
+		pr := protos[name]
+		fd := pr.fd
+		if p.InterpretOptionsInUnlinkedFiles {
+			pr.lenient = true
+			interpretFileOptions(pr, poorFileDescriptorish{FileDescriptorProto: fd})
+		}
+		if p.IncludeSourceCodeInfo {
+			fd.SourceCodeInfo = pr.generateSourceCodeInfo()
+		}
+		fds[i] = fd
+	}
+	return fds, nil
+}
+
// containsAbsFilePath reports whether any of the given paths is absolute.
func containsAbsFilePath(filePaths []string) bool {
	for i := range filePaths {
		if filepath.IsAbs(filePaths[i]) {
			return true
		}
	}
	return false
}
+
// absoluteFilePaths converts each of the given paths to an absolute path,
// returning the results in the same order. The first conversion error, if
// any, aborts and is returned.
func absoluteFilePaths(filePaths []string) ([]string, error) {
	abs := make([]string, len(filePaths))
	for i, fp := range filePaths {
		a, err := filepath.Abs(fp)
		if err != nil {
			return nil, err
		}
		abs[i] = a
	}
	return abs, nil
}
+
// resolveAbsFilename returns absFileName expressed relative to the first
// import path in absImportPaths that contains it. An error is returned if no
// import path contains the file.
func resolveAbsFilename(absImportPaths []string, absFileName string) (string, error) {
	for _, importPath := range absImportPaths {
		if !isDescendant(importPath, absFileName) {
			continue
		}
		return filepath.Rel(importPath, absFileName)
	}
	return "", fmt.Errorf("%s does not reside in any import path", absFileName)
}

// isDescendant returns true if file is a descendant of dir.
func isDescendant(dir, file string) bool {
	dir = filepath.Clean(dir)
	for cur := file; ; {
		parent := filepath.Dir(cur)
		if parent == dir {
			return true
		}
		if parent == "." || parent == cur {
			// ran out of path elements without finding dir
			return false
		}
		cur = parent
	}
}
+
// fixupFilenames attempts to rename entries of protos so that the map keys
// match the paths used in the files' import statements, which makes the files
// linkable even when the caller-supplied names differ from the import paths.
// It is the implementation behind Parser.InferImportPaths. The returned map
// replaces the input; entries whose names cannot be improved are carried over
// unchanged.
func fixupFilenames(protos map[string]*parseResult) map[string]*parseResult {
	// In the event that the given filenames (keys in the supplied map) do not
	// match the actual paths used in 'import' statements in the files, we try
	// to revise names in the protos so that they will match and be linkable.
	revisedProtos := map[string]*parseResult{}

	protoPaths := map[string]struct{}{}
	// TODO: this is O(n^2) but could likely be O(n) with a clever data structure (prefix tree that is indexed backwards?)
	importCandidates := map[string]map[string]struct{}{}
	candidatesAvailable := map[string]struct{}{}
	// phase 1: for every import statement, collect the supplied file names
	// that end with that import path (the possible matches)
	for name := range protos {
		candidatesAvailable[name] = struct{}{}
		for _, f := range protos {
			for _, imp := range f.fd.Dependency {
				if strings.HasSuffix(name, imp) {
					candidates := importCandidates[imp]
					if candidates == nil {
						candidates = map[string]struct{}{}
						importCandidates[imp] = candidates
					}
					candidates[name] = struct{}{}
				}
			}
		}
	}
	for imp, candidates := range importCandidates {
		// if we found multiple possible candidates, use the one that is an exact match
		// if it exists, and otherwise, guess that it's the shortest path (fewest elements)
		var best string
		for c := range candidates {
			if _, ok := candidatesAvailable[c]; !ok {
				// already used this candidate and re-written its filename accordingly
				continue
			}
			if c == imp {
				// exact match!
				best = c
				break
			}
			if best == "" {
				best = c
			} else {
				// HACK: we can't actually tell which files is supposed to match
				// this import, so arbitrarily pick the "shorter" one (fewest
				// path elements) or, on a tie, the lexically earlier one
				minLen := strings.Count(best, string(filepath.Separator))
				cLen := strings.Count(c, string(filepath.Separator))
				if cLen < minLen || (cLen == minLen && c < best) {
					best = c
				}
			}
		}
		if best != "" {
			// record the stripped prefix as a discovered proto path, and
			// rename the chosen file to match the import statement
			prefix := best[:len(best)-len(imp)]
			if len(prefix) > 0 {
				protoPaths[prefix] = struct{}{}
			}
			f := protos[best]
			f.fd.Name = proto.String(imp)
			revisedProtos[imp] = f
			delete(candidatesAvailable, best)
		}
	}

	if len(candidatesAvailable) == 0 {
		return revisedProtos
	}

	if len(protoPaths) == 0 {
		// no prefixes discovered, so remaining files keep their given names
		for c := range candidatesAvailable {
			revisedProtos[c] = protos[c]
		}
		return revisedProtos
	}

	// Any remaining candidates are entry-points (not imported by others), so
	// the best bet to "fixing" their file name is to see if they're in one of
	// the proto paths we found, and if so strip that prefix.
	protoPathStrs := make([]string, len(protoPaths))
	i := 0
	for p := range protoPaths {
		protoPathStrs[i] = p
		i++
	}
	sort.Strings(protoPathStrs)
	// we look at paths in reverse order, so we'll use a longer proto path if
	// there is more than one match
	for c := range candidatesAvailable {
		var imp string
		for i := len(protoPathStrs) - 1; i >= 0; i-- {
			p := protoPathStrs[i]
			if strings.HasPrefix(c, p) {
				imp = c[len(p):]
				break
			}
		}
		if imp != "" {
			f := protos[c]
			f.fd.Name = proto.String(imp)
			revisedProtos[imp] = f
		} else {
			revisedProtos[c] = protos[c]
		}
	}

	return revisedProtos
}
+
// parseProtoFiles parses each named file into the parsed map, skipping names
// already present. A file that cannot be opened falls back to the built-in
// standardImports (e.g. google/protobuf/*.proto); otherwise the open error is
// returned. When recursive is true, each file's imports are parsed as well.
func parseProtoFiles(acc FileAccessor, filenames []string, recursive, validate bool, parsed map[string]*parseResult) error {
	for _, name := range filenames {
		if _, ok := parsed[name]; ok {
			continue
		}
		in, err := acc(name)
		if err != nil {
			if d, ok := standardImports[name]; ok {
				parsed[name] = &parseResult{fd: d}
				continue
			}
			return err
		}
		// parse inside a closure so the reader is closed before any recursion
		// below; note that err is assigned from the enclosing scope and
		// checked only after the closure returns
		func() {
			defer in.Close()
			parsed[name], err = parseProto(name, in, validate)
		}()
		if err != nil {
			return err
		}
		if recursive {
			err = parseProtoFiles(acc, parsed[name].fd.Dependency, true, validate, parsed)
			if err != nil {
				return fmt.Errorf("failed to load imports for %q: %s", name, err)
			}
		}
	}
	return nil
}
+
// parseResult is the product of parsing a single proto source file: the file
// descriptor proto plus the bookkeeping needed for error reporting, option
// interpretation, and source-code-info generation.
type parseResult struct {
	// the parsed file descriptor
	fd *dpb.FileDescriptorProto

	// if set to true, enables lenient interpretation of options, where
	// unrecognized options will be left uninterpreted instead of resulting in a
	// link error
	lenient bool

	// a map of elements in the descriptor to nodes in the AST
	// (for extracting position information when validating the descriptor)
	nodes map[proto.Message]node

	// a map of uninterpreted option AST nodes to their relative path
	// in the resulting options message
	interpretedOptions map[*optionNode][]int32
}
+
// The get*Node accessors below look up the AST node recorded for a given
// descriptor proto. When r.nodes is nil (no AST retained, as for the built-in
// standard imports), each returns a noSourceNode carrying only the file name,
// so callers always get a usable position for error messages.

func (r *parseResult) getFileNode(f *dpb.FileDescriptorProto) fileDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(f.GetName())}
	}
	return r.nodes[f].(fileDecl)
}

func (r *parseResult) getOptionNode(o *dpb.UninterpretedOption) optionDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[o].(optionDecl)
}

func (r *parseResult) getOptionNamePartNode(o *dpb.UninterpretedOption_NamePart) node {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[o]
}

func (r *parseResult) getMessageNode(m *dpb.DescriptorProto) msgDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[m].(msgDecl)
}

func (r *parseResult) getFieldNode(f *dpb.FieldDescriptorProto) fieldDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[f].(fieldDecl)
}

func (r *parseResult) getOneOfNode(o *dpb.OneofDescriptorProto) node {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[o]
}

func (r *parseResult) getExtensionRangeNode(e *dpb.DescriptorProto_ExtensionRange) rangeDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[e].(rangeDecl)
}

func (r *parseResult) getMessageReservedRangeNode(rr *dpb.DescriptorProto_ReservedRange) rangeDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[rr].(rangeDecl)
}

func (r *parseResult) getEnumNode(e *dpb.EnumDescriptorProto) node {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[e]
}

func (r *parseResult) getEnumValueNode(e *dpb.EnumValueDescriptorProto) enumValueDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[e].(enumValueDecl)
}

func (r *parseResult) getEnumReservedRangeNode(rr *dpb.EnumDescriptorProto_EnumReservedRange) rangeDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[rr].(rangeDecl)
}

func (r *parseResult) getServiceNode(s *dpb.ServiceDescriptorProto) node {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[s]
}

func (r *parseResult) getMethodNode(m *dpb.MethodDescriptorProto) methodDecl {
	if r.nodes == nil {
		return noSourceNode{pos: unknownPos(r.fd.GetName())}
	}
	return r.nodes[m].(methodDecl)
}
+
// The put*Node helpers below record, in r.nodes, the AST node from which each
// descriptor proto was built, so position information can be recovered later
// via the corresponding get*Node accessors.

func (r *parseResult) putFileNode(f *dpb.FileDescriptorProto, n *fileNode) {
	r.nodes[f] = n
}

func (r *parseResult) putOptionNode(o *dpb.UninterpretedOption, n *optionNode) {
	r.nodes[o] = n
}

func (r *parseResult) putOptionNamePartNode(o *dpb.UninterpretedOption_NamePart, n *optionNamePartNode) {
	r.nodes[o] = n
}

func (r *parseResult) putMessageNode(m *dpb.DescriptorProto, n msgDecl) {
	r.nodes[m] = n
}

func (r *parseResult) putFieldNode(f *dpb.FieldDescriptorProto, n fieldDecl) {
	r.nodes[f] = n
}

func (r *parseResult) putOneOfNode(o *dpb.OneofDescriptorProto, n *oneOfNode) {
	r.nodes[o] = n
}

func (r *parseResult) putExtensionRangeNode(e *dpb.DescriptorProto_ExtensionRange, n *rangeNode) {
	r.nodes[e] = n
}

func (r *parseResult) putMessageReservedRangeNode(rr *dpb.DescriptorProto_ReservedRange, n *rangeNode) {
	r.nodes[rr] = n
}

func (r *parseResult) putEnumNode(e *dpb.EnumDescriptorProto, n *enumNode) {
	r.nodes[e] = n
}

func (r *parseResult) putEnumValueNode(e *dpb.EnumValueDescriptorProto, n *enumValueNode) {
	r.nodes[e] = n
}

func (r *parseResult) putEnumReservedRangeNode(rr *dpb.EnumDescriptorProto_EnumReservedRange, n *rangeNode) {
	r.nodes[rr] = n
}

func (r *parseResult) putServiceNode(s *dpb.ServiceDescriptorProto, n *serviceNode) {
	r.nodes[s] = n
}

func (r *parseResult) putMethodNode(m *dpb.MethodDescriptorProto, n *methodNode) {
	r.nodes[m] = n
}
+
+// parseProto lexes and parses proto source read from r. The filename is used
+// only for error reporting. When validate is true, basicValidate is run on
+// the result before it is returned. Every error returned carries source
+// position information (ErrorWithSourcePos).
+func parseProto(filename string, r io.Reader, validate bool) (*parseResult, error) {
+	lx := newLexer(r)
+	lx.filename = filename
+	protoParse(lx)
+	if lx.err != nil {
+		// Wrap errors that do not already carry a source position.
+		if _, ok := lx.err.(ErrorWithSourcePos); ok {
+			return nil, lx.err
+		}
+		return nil, ErrorWithSourcePos{Pos: lx.prev(), Underlying: lx.err}
+	}
+	// parser will not return an error if input is empty, so we
+	// need to also check if the result is non-nil
+	if lx.res == nil {
+		return nil, ErrorWithSourcePos{Pos: lx.prev(), Underlying: errors.New("input is empty")}
+	}
+
+	res, err := createParseResult(filename, lx.res)
+	if err != nil {
+		return nil, err
+	}
+	if validate {
+		if err := basicValidate(res); err != nil {
+			return nil, err
+		}
+	}
+	return res, nil
+}
+
+// createParseResult builds a parseResult for the given file AST: it
+// allocates the node and interpreted-option maps and populates the file
+// descriptor proto from the AST.
+func createParseResult(filename string, file *fileNode) (*parseResult, error) {
+	res := &parseResult{
+		nodes:              make(map[proto.Message]node),
+		interpretedOptions: make(map[*optionNode][]int32),
+	}
+	if err := res.createFileDescriptor(filename, file); err != nil {
+		return res, err
+	}
+	return res, nil
+}
+
+// createFileDescriptor converts the file AST into a FileDescriptorProto,
+// storing the result in r.fd. It processes top-level declarations in source
+// order, so dependency indexes and element ordering match the source file.
+// It returns an error if more than one package declaration is present.
+func (r *parseResult) createFileDescriptor(filename string, file *fileNode) error {
+	fd := &dpb.FileDescriptorProto{Name: proto.String(filename)}
+	r.putFileNode(fd, file)
+
+	isProto3 := false
+	if file.syntax != nil {
+		isProto3 = file.syntax.syntax.val == "proto3"
+		// proto2 is the default, so no need to set unless proto3
+		if isProto3 {
+			fd.Syntax = proto.String(file.syntax.syntax.val)
+		}
+	}
+
+	for _, decl := range file.decls {
+		if decl.enum != nil {
+			fd.EnumType = append(fd.EnumType, r.asEnumDescriptor(decl.enum))
+		} else if decl.extend != nil {
+			r.addExtensions(decl.extend, &fd.Extension, &fd.MessageType, isProto3)
+		} else if decl.imp != nil {
+			file.imports = append(file.imports, decl.imp)
+			// public/weak dependencies are recorded by their index into
+			// the Dependency list, so capture the index before appending.
+			index := len(fd.Dependency)
+			fd.Dependency = append(fd.Dependency, decl.imp.name.val)
+			if decl.imp.public {
+				fd.PublicDependency = append(fd.PublicDependency, int32(index))
+			} else if decl.imp.weak {
+				fd.WeakDependency = append(fd.WeakDependency, int32(index))
+			}
+		} else if decl.message != nil {
+			fd.MessageType = append(fd.MessageType, r.asMessageDescriptor(decl.message, isProto3))
+		} else if decl.option != nil {
+			// Options stay uninterpreted at this stage; a later pass
+			// resolves them.
+			if fd.Options == nil {
+				fd.Options = &dpb.FileOptions{}
+			}
+			fd.Options.UninterpretedOption = append(fd.Options.UninterpretedOption, r.asUninterpretedOption(decl.option))
+		} else if decl.service != nil {
+			fd.Service = append(fd.Service, r.asServiceDescriptor(decl.service))
+		} else if decl.pkg != nil {
+			if fd.Package != nil {
+				return ErrorWithSourcePos{Pos: decl.pkg.start(), Underlying: errors.New("files should have only one package declaration")}
+			}
+			file.pkg = decl.pkg
+			fd.Package = proto.String(decl.pkg.name.val)
+		}
+	}
+	r.fd = fd
+	return nil
+}
+
+// asUninterpretedOptions converts each option AST node into its
+// corresponding UninterpretedOption descriptor, preserving order.
+func (r *parseResult) asUninterpretedOptions(nodes []*optionNode) []*dpb.UninterpretedOption {
+	opts := make([]*dpb.UninterpretedOption, 0, len(nodes))
+	for _, n := range nodes {
+		opts = append(opts, r.asUninterpretedOption(n))
+	}
+	return opts
+}
+
+// asUninterpretedOption converts a single option AST node into an
+// UninterpretedOption descriptor, setting exactly one of the value fields
+// based on the dynamic type of the parsed value.
+func (r *parseResult) asUninterpretedOption(node *optionNode) *dpb.UninterpretedOption {
+	opt := &dpb.UninterpretedOption{Name: r.asUninterpretedOptionName(node.name.parts)}
+	r.putOptionNode(opt, node)
+
+	switch val := node.val.value().(type) {
+	case bool:
+		if val {
+			opt.IdentifierValue = proto.String("true")
+		} else {
+			opt.IdentifierValue = proto.String("false")
+		}
+	case int64:
+		// int64 values come from negative integer literals, so they go in
+		// NegativeIntValue; non-negative literals are produced as uint64.
+		opt.NegativeIntValue = proto.Int64(val)
+	case uint64:
+		opt.PositiveIntValue = proto.Uint64(val)
+	case float64:
+		opt.DoubleValue = proto.Float64(val)
+	case string:
+		opt.StringValue = []byte(val)
+	case identifier:
+		opt.IdentifierValue = proto.String(string(val))
+	case []*aggregateEntryNode:
+		// Aggregate (message-literal) values are stored as their
+		// text-format string representation.
+		var buf bytes.Buffer
+		aggToString(val, &buf)
+		aggStr := buf.String()
+		opt.AggregateValue = proto.String(aggStr)
+	}
+	return opt
+}
+
+// asUninterpretedOptionName converts option-name AST parts into NamePart
+// descriptors. Non-extension parts share a single ident node with their
+// siblings, so the relevant substring is extracted via offset/length.
+func (r *parseResult) asUninterpretedOptionName(parts []*optionNamePartNode) []*dpb.UninterpretedOption_NamePart {
+	ret := make([]*dpb.UninterpretedOption_NamePart, len(parts))
+	for i, part := range parts {
+		txt := part.text.val
+		if !part.isExtension {
+			txt = part.text.val[part.offset : part.offset+part.length]
+		}
+		np := &dpb.UninterpretedOption_NamePart{
+			NamePart:    proto.String(txt),
+			IsExtension: proto.Bool(part.isExtension),
+		}
+		r.putOptionNamePartNode(np, part)
+		ret[i] = np
+	}
+	return ret
+}
+
+// addExtensions converts the declarations of an extend block into field
+// descriptors (and, for groups, nested message descriptors), appending them
+// to the caller-supplied slices. Each produced field has Extendee set to the
+// extended message's name.
+func (r *parseResult) addExtensions(ext *extendNode, flds *[]*dpb.FieldDescriptorProto, msgs *[]*dpb.DescriptorProto, isProto3 bool) {
+	extendee := ext.extendee.val
+	for _, decl := range ext.decls {
+		if decl.field != nil {
+			decl.field.extendee = ext
+			fd := r.asFieldDescriptor(decl.field)
+			fd.Extendee = proto.String(extendee)
+			*flds = append(*flds, fd)
+		} else if decl.group != nil {
+			// A group declaration yields both a field and the group's
+			// message type.
+			decl.group.extendee = ext
+			fd, md := r.asGroupDescriptors(decl.group, isProto3)
+			fd.Extendee = proto.String(extendee)
+			*flds = append(*flds, fd)
+			*msgs = append(*msgs, md)
+		}
+	}
+}
+
+// asLabel maps a label AST node to the corresponding descriptor label enum.
+// A nil node (no label in the source) yields nil.
+func asLabel(lbl *labelNode) *dpb.FieldDescriptorProto_Label {
+	if lbl == nil {
+		return nil
+	}
+	if lbl.repeated {
+		return dpb.FieldDescriptorProto_LABEL_REPEATED.Enum()
+	}
+	if lbl.required {
+		return dpb.FieldDescriptorProto_LABEL_REQUIRED.Enum()
+	}
+	return dpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum()
+}
+
+// asFieldDescriptor converts a field AST node into a FieldDescriptorProto,
+// recording the node mapping and attaching any field options uninterpreted.
+func (r *parseResult) asFieldDescriptor(node *fieldNode) *dpb.FieldDescriptorProto {
+	fd := newFieldDescriptor(node.name.val, node.fldType.val, int32(node.tag.val), asLabel(node.label))
+	r.putFieldNode(fd, node)
+	if len(node.options) > 0 {
+		fd.Options = &dpb.FieldOptions{UninterpretedOption: r.asUninterpretedOptions(node.options)}
+	}
+	return fd
+}
+
+// newFieldDescriptor builds a FieldDescriptorProto with the given name, tag,
+// and label, mapping the source type name onto the descriptor Type enum.
+// Names that are not scalar type keywords are provisionally treated as
+// message types (see the note in the default case).
+func newFieldDescriptor(name string, fieldType string, tag int32, lbl *dpb.FieldDescriptorProto_Label) *dpb.FieldDescriptorProto {
+	fd := &dpb.FieldDescriptorProto{
+		Name:     proto.String(name),
+		JsonName: proto.String(internal.JsonName(name)),
+		Number:   proto.Int32(tag),
+		Label:    lbl,
+	}
+	switch fieldType {
+	case "double":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_DOUBLE.Enum()
+	case "float":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_FLOAT.Enum()
+	case "int32":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_INT32.Enum()
+	case "int64":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_INT64.Enum()
+	case "uint32":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_UINT32.Enum()
+	case "uint64":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_UINT64.Enum()
+	case "sint32":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_SINT32.Enum()
+	case "sint64":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_SINT64.Enum()
+	case "fixed32":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_FIXED32.Enum()
+	case "fixed64":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_FIXED64.Enum()
+	case "sfixed32":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_SFIXED32.Enum()
+	case "sfixed64":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_SFIXED64.Enum()
+	case "bool":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_BOOL.Enum()
+	case "string":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_STRING.Enum()
+	case "bytes":
+		fd.Type = dpb.FieldDescriptorProto_TYPE_BYTES.Enum()
+	default:
+		// NB: we don't have enough info to determine whether this is an enum or a message type,
+		// so we'll change it to enum later once we can ascertain if it's an enum reference
+		fd.Type = dpb.FieldDescriptorProto_TYPE_MESSAGE.Enum()
+		fd.TypeName = proto.String(fieldType)
+	}
+	return fd
+}
+
+// asGroupDescriptors converts a group AST node into the pair of descriptors
+// a group implies: a field (whose name is the lower-cased group name) and a
+// nested message type (whose name is the group name as written).
+func (r *parseResult) asGroupDescriptors(group *groupNode, isProto3 bool) (*dpb.FieldDescriptorProto, *dpb.DescriptorProto) {
+	fieldName := strings.ToLower(group.name.val)
+	fd := &dpb.FieldDescriptorProto{
+		Name:     proto.String(fieldName),
+		JsonName: proto.String(internal.JsonName(fieldName)),
+		Number:   proto.Int32(int32(group.tag.val)),
+		Label:    asLabel(group.label),
+		Type:     dpb.FieldDescriptorProto_TYPE_GROUP.Enum(),
+		TypeName: proto.String(group.name.val),
+	}
+	r.putFieldNode(fd, group)
+	md := &dpb.DescriptorProto{Name: proto.String(group.name.val)}
+	r.putMessageNode(md, group)
+	r.addMessageDecls(md, &group.reserved, group.decls, isProto3)
+	return fd, md
+}
+
+// asMapDescriptors converts a map field AST node into the pair of
+// descriptors a map implies: a repeated field of a synthesized map-entry
+// message type, and that entry message itself (key = tag 1, value = tag 2,
+// MapEntry option set).
+func (r *parseResult) asMapDescriptors(mapField *mapFieldNode, isProto3 bool) (*dpb.FieldDescriptorProto, *dpb.DescriptorProto) {
+	var lbl *dpb.FieldDescriptorProto_Label
+	// In proto2 the entry fields carry an explicit optional label; in
+	// proto3 the label is omitted (nil).
+	if !isProto3 {
+		lbl = dpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum()
+	}
+	keyFd := newFieldDescriptor("key", mapField.keyType.val, 1, lbl)
+	r.putFieldNode(keyFd, mapField.keyField())
+	valFd := newFieldDescriptor("value", mapField.valueType.val, 2, lbl)
+	r.putFieldNode(valFd, mapField.valueField())
+	// The entry message name is derived from the field name, e.g. a field
+	// "foo_bar" yields "FooBarEntry".
+	entryName := internal.InitCap(internal.JsonName(mapField.name.val)) + "Entry"
+	fd := newFieldDescriptor(mapField.name.val, entryName, int32(mapField.tag.val), dpb.FieldDescriptorProto_LABEL_REPEATED.Enum())
+	if len(mapField.options) > 0 {
+		fd.Options = &dpb.FieldOptions{UninterpretedOption: r.asUninterpretedOptions(mapField.options)}
+	}
+	r.putFieldNode(fd, mapField)
+	md := &dpb.DescriptorProto{
+		Name:    proto.String(entryName),
+		Options: &dpb.MessageOptions{MapEntry: proto.Bool(true)},
+		Field:   []*dpb.FieldDescriptorProto{keyFd, valFd},
+	}
+	r.putMessageNode(md, mapField)
+	return fd, md
+}
+
+// asExtensionRanges converts an extensions declaration into one
+// ExtensionRange descriptor per range. The descriptor End is exclusive, so
+// the AST's inclusive end is incremented. Options declared on the statement
+// are attached to every range it declares.
+func (r *parseResult) asExtensionRanges(node *extensionRangeNode) []*dpb.DescriptorProto_ExtensionRange {
+	opts := r.asUninterpretedOptions(node.options)
+	ers := make([]*dpb.DescriptorProto_ExtensionRange, len(node.ranges))
+	for i, rng := range node.ranges {
+		er := &dpb.DescriptorProto_ExtensionRange{
+			Start: proto.Int32(rng.st),
+			End:   proto.Int32(rng.en + 1),
+		}
+		if len(opts) > 0 {
+			er.Options = &dpb.ExtensionRangeOptions{UninterpretedOption: opts}
+		}
+		r.putExtensionRangeNode(er, rng)
+		ers[i] = er
+	}
+	return ers
+}
+
+// asEnumValue converts an enum value AST node into an
+// EnumValueDescriptorProto. The numeric value comes from whichever literal
+// was present: numberP (non-negative) or numberN (negative).
+func (r *parseResult) asEnumValue(ev *enumValueNode) *dpb.EnumValueDescriptorProto {
+	var num int32
+	if ev.numberP != nil {
+		num = int32(ev.numberP.val)
+	} else {
+		num = int32(ev.numberN.val)
+	}
+	evd := &dpb.EnumValueDescriptorProto{Name: proto.String(ev.name.val), Number: proto.Int32(num)}
+	r.putEnumValueNode(evd, ev)
+	if len(ev.options) > 0 {
+		evd.Options = &dpb.EnumValueOptions{UninterpretedOption: r.asUninterpretedOptions(ev.options)}
+	}
+	return evd
+}
+
+// asMethodDescriptor converts an RPC method AST node into a
+// MethodDescriptorProto, setting the streaming flags when the "stream"
+// keyword was present on the request or response type.
+func (r *parseResult) asMethodDescriptor(node *methodNode) *dpb.MethodDescriptorProto {
+	md := &dpb.MethodDescriptorProto{
+		Name:       proto.String(node.name.val),
+		InputType:  proto.String(node.input.msgType.val),
+		OutputType: proto.String(node.output.msgType.val),
+	}
+	r.putMethodNode(md, node)
+	if node.input.streamKeyword != nil {
+		md.ClientStreaming = proto.Bool(true)
+	}
+	if node.output.streamKeyword != nil {
+		md.ServerStreaming = proto.Bool(true)
+	}
+	// protoc always adds a MethodOptions if there are brackets
+	// We have a non-nil node.options if there are brackets
+	// We do the same to match protoc as closely as possible
+	// https://github.com/protocolbuffers/protobuf/blob/0c3f43a6190b77f1f68b7425d1b7e1a8257a8d0c/src/google/protobuf/compiler/parser.cc#L2152
+	if node.options != nil {
+		md.Options = &dpb.MethodOptions{UninterpretedOption: r.asUninterpretedOptions(node.options)}
+	}
+	return md
+}
+
+// asEnumDescriptor converts an enum AST node into an EnumDescriptorProto,
+// processing its options, values, and reserved declarations in source order.
+// Reserved names are also recorded on the AST node itself (en.reserved) for
+// later use.
+func (r *parseResult) asEnumDescriptor(en *enumNode) *dpb.EnumDescriptorProto {
+	ed := &dpb.EnumDescriptorProto{Name: proto.String(en.name.val)}
+	r.putEnumNode(ed, en)
+	for _, decl := range en.decls {
+		if decl.option != nil {
+			if ed.Options == nil {
+				ed.Options = &dpb.EnumOptions{}
+			}
+			ed.Options.UninterpretedOption = append(ed.Options.UninterpretedOption, r.asUninterpretedOption(decl.option))
+		} else if decl.value != nil {
+			ed.Value = append(ed.Value, r.asEnumValue(decl.value))
+		} else if decl.reserved != nil {
+			for _, n := range decl.reserved.names {
+				en.reserved = append(en.reserved, n)
+				ed.ReservedName = append(ed.ReservedName, n.val)
+			}
+			for _, rng := range decl.reserved.ranges {
+				ed.ReservedRange = append(ed.ReservedRange, r.asEnumReservedRange(rng))
+			}
+		}
+	}
+	return ed
+}
+
+// asEnumReservedRange converts a range AST node into an enum reserved-range
+// descriptor. Unlike message ranges, enum reserved ranges have an inclusive
+// End, so no adjustment is applied.
+func (r *parseResult) asEnumReservedRange(rng *rangeNode) *dpb.EnumDescriptorProto_EnumReservedRange {
+	rr := &dpb.EnumDescriptorProto_EnumReservedRange{
+		Start: proto.Int32(rng.st),
+		End:   proto.Int32(rng.en),
+	}
+	r.putEnumReservedRangeNode(rr, rng)
+	return rr
+}
+
+// asMessageDescriptor converts a message AST node into a DescriptorProto,
+// delegating to addMessageDecls for the message body.
+func (r *parseResult) asMessageDescriptor(node *messageNode, isProto3 bool) *dpb.DescriptorProto {
+	msgd := &dpb.DescriptorProto{Name: proto.String(node.name.val)}
+	r.putMessageNode(msgd, node)
+	r.addMessageDecls(msgd, &node.reserved, node.decls, isProto3)
+	return msgd
+}
+
+// addMessageDecls populates msgd from the body declarations of a message
+// (or group), processing them in source order. Reserved names are also
+// appended to reservedNames so the caller's AST node tracks them. Map and
+// group fields each add both a field and a nested message type; oneof
+// members are appended to the message's Field list with OneofIndex set.
+func (r *parseResult) addMessageDecls(msgd *dpb.DescriptorProto, reservedNames *[]*stringLiteralNode, decls []*messageElement, isProto3 bool) {
+	for _, decl := range decls {
+		if decl.enum != nil {
+			msgd.EnumType = append(msgd.EnumType, r.asEnumDescriptor(decl.enum))
+		} else if decl.extend != nil {
+			r.addExtensions(decl.extend, &msgd.Extension, &msgd.NestedType, isProto3)
+		} else if decl.extensionRange != nil {
+			msgd.ExtensionRange = append(msgd.ExtensionRange, r.asExtensionRanges(decl.extensionRange)...)
+		} else if decl.field != nil {
+			msgd.Field = append(msgd.Field, r.asFieldDescriptor(decl.field))
+		} else if decl.mapField != nil {
+			fd, md := r.asMapDescriptors(decl.mapField, isProto3)
+			msgd.Field = append(msgd.Field, fd)
+			msgd.NestedType = append(msgd.NestedType, md)
+		} else if decl.group != nil {
+			fd, md := r.asGroupDescriptors(decl.group, isProto3)
+			msgd.Field = append(msgd.Field, fd)
+			msgd.NestedType = append(msgd.NestedType, md)
+		} else if decl.oneOf != nil {
+			// Capture the index this oneof will occupy before appending,
+			// so member fields can reference it.
+			oodIndex := len(msgd.OneofDecl)
+			ood := &dpb.OneofDescriptorProto{Name: proto.String(decl.oneOf.name.val)}
+			r.putOneOfNode(ood, decl.oneOf)
+			msgd.OneofDecl = append(msgd.OneofDecl, ood)
+			for _, oodecl := range decl.oneOf.decls {
+				if oodecl.option != nil {
+					if ood.Options == nil {
+						ood.Options = &dpb.OneofOptions{}
+					}
+					ood.Options.UninterpretedOption = append(ood.Options.UninterpretedOption, r.asUninterpretedOption(oodecl.option))
+				} else if oodecl.field != nil {
+					fd := r.asFieldDescriptor(oodecl.field)
+					fd.OneofIndex = proto.Int32(int32(oodIndex))
+					msgd.Field = append(msgd.Field, fd)
+				}
+			}
+		} else if decl.option != nil {
+			if msgd.Options == nil {
+				msgd.Options = &dpb.MessageOptions{}
+			}
+			msgd.Options.UninterpretedOption = append(msgd.Options.UninterpretedOption, r.asUninterpretedOption(decl.option))
+		} else if decl.nested != nil {
+			msgd.NestedType = append(msgd.NestedType, r.asMessageDescriptor(decl.nested, isProto3))
+		} else if decl.reserved != nil {
+			for _, n := range decl.reserved.names {
+				*reservedNames = append(*reservedNames, n)
+				msgd.ReservedName = append(msgd.ReservedName, n.val)
+			}
+			for _, rng := range decl.reserved.ranges {
+				msgd.ReservedRange = append(msgd.ReservedRange, r.asMessageReservedRange(rng))
+			}
+		}
+	}
+}
+
+// asMessageReservedRange converts a range AST node into a message
+// reserved-range descriptor. The descriptor End is exclusive, so the AST's
+// inclusive end is incremented.
+func (r *parseResult) asMessageReservedRange(rng *rangeNode) *dpb.DescriptorProto_ReservedRange {
+	rr := &dpb.DescriptorProto_ReservedRange{
+		Start: proto.Int32(rng.st),
+		End:   proto.Int32(rng.en + 1),
+	}
+	r.putMessageReservedRangeNode(rr, rng)
+	return rr
+}
+
+// asServiceDescriptor converts a service AST node into a
+// ServiceDescriptorProto, processing its options and RPC methods in source
+// order.
+func (r *parseResult) asServiceDescriptor(svc *serviceNode) *dpb.ServiceDescriptorProto {
+	sd := &dpb.ServiceDescriptorProto{Name: proto.String(svc.name.val)}
+	r.putServiceNode(sd, svc)
+	for _, decl := range svc.decls {
+		if decl.option != nil {
+			if sd.Options == nil {
+				sd.Options = &dpb.ServiceOptions{}
+			}
+			sd.Options.UninterpretedOption = append(sd.Options.UninterpretedOption, r.asUninterpretedOption(decl.option))
+		} else if decl.rpc != nil {
+			sd.Method = append(sd.Method, r.asMethodDescriptor(decl.rpc))
+		}
+	}
+	return sd
+}
+
+// toNameParts splits a dotted identifier (starting at the given byte offset
+// into ident.val) into option-name part nodes. Each part shares the same
+// underlying ident node and records its own offset/length within it; the
+// "+ 1" skips the dot separator between parts.
+func toNameParts(ident *identNode, offset int) []*optionNamePartNode {
+	parts := strings.Split(ident.val[offset:], ".")
+	ret := make([]*optionNamePartNode, len(parts))
+	for i, p := range parts {
+		ret[i] = &optionNamePartNode{text: ident, offset: offset, length: len(p)}
+		ret[i].setRange(ident, ident)
+		offset += len(p) + 1
+	}
+	return ret
+}
+
+// checkUint64InInt32Range reports a lexer error if v does not fit in int32.
+func checkUint64InInt32Range(lex protoLexer, pos *SourcePos, v uint64) {
+	if v > math.MaxInt32 {
+		lexError(lex, pos, fmt.Sprintf("constant %d is out of range for int32 (%d to %d)", v, math.MinInt32, math.MaxInt32))
+	}
+}
+
+// checkInt64InInt32Range reports a lexer error if v does not fit in int32.
+func checkInt64InInt32Range(lex protoLexer, pos *SourcePos, v int64) {
+	if v > math.MaxInt32 || v < math.MinInt32 {
+		lexError(lex, pos, fmt.Sprintf("constant %d is out of range for int32 (%d to %d)", v, math.MinInt32, math.MaxInt32))
+	}
+}
+
+// checkTag reports a lexer error if v is not a usable field tag: it must not
+// exceed the maximum tag number and must not fall in the reserved range set
+// aside for protobuf's own wire-format use.
+func checkTag(lex protoLexer, pos *SourcePos, v uint64) {
+	if v > internal.MaxTag {
+		lexError(lex, pos, fmt.Sprintf("tag number %d is higher than max allowed tag number (%d)", v, internal.MaxTag))
+	} else if v >= internal.SpecialReservedStart && v <= internal.SpecialReservedEnd {
+		lexError(lex, pos, fmt.Sprintf("tag number %d is in disallowed reserved range %d-%d", v, internal.SpecialReservedStart, internal.SpecialReservedEnd))
+	}
+}
+
+// aggToString renders an aggregate (message-literal) option value to buf in
+// text format: "{ name: value name2 { ... } }". Nested aggregates are
+// written without a ":" separator, matching protoc's text output; all other
+// values get "name: " followed by the value (see elementToString).
+func aggToString(agg []*aggregateEntryNode, buf *bytes.Buffer) {
+	buf.WriteString("{")
+	for _, a := range agg {
+		buf.WriteString(" ")
+		buf.WriteString(a.name.value())
+		if v, ok := a.val.(*aggregateLiteralNode); ok {
+			aggToString(v.elements, buf)
+		} else {
+			buf.WriteString(": ")
+			elementToString(a.val.value(), buf)
+		}
+	}
+	buf.WriteString(" }")
+}
+
+// elementToString writes the text-format representation of a single option
+// value to buf; lists and nested aggregates are rendered recursively.
+//
+// Bug fix: the float and list cases previously emitted a stray ": " prefix
+// even though the caller (aggToString) has already written the "name: "
+// separator, producing output like "f: : 1.5" — and, for floats appearing
+// as list elements, "[: 1.5]". Each case now writes only the value itself.
+func elementToString(v interface{}, buf *bytes.Buffer) {
+	switch v := v.(type) {
+	case bool, int64, uint64, identifier:
+		fmt.Fprintf(buf, "%v", v)
+	case float64:
+		// Infinities and NaN are spelled the way proto text format expects,
+		// rather than Go's default "+Inf"/"NaN" formatting.
+		if math.IsInf(v, 1) {
+			buf.WriteString("inf")
+		} else if math.IsInf(v, -1) {
+			buf.WriteString("-inf")
+		} else if math.IsNaN(v) {
+			buf.WriteString("nan")
+		} else {
+			fmt.Fprintf(buf, "%v", v)
+		}
+	case string:
+		// Strings are double-quoted with C-style escaping.
+		buf.WriteRune('"')
+		writeEscapedBytes(buf, []byte(v))
+		buf.WriteRune('"')
+	case []valueNode:
+		buf.WriteString("[")
+		first := true
+		for _, e := range v {
+			if first {
+				first = false
+			} else {
+				buf.WriteString(", ")
+			}
+			elementToString(e.value(), buf)
+		}
+		buf.WriteString("]")
+	case []*aggregateEntryNode:
+		aggToString(v, buf)
+	}
+}
+
+// writeEscapedBytes writes b to buf with C-style escaping, matching the
+// escaping protoc applies when rendering string values in text format:
+// common control characters and quotes get backslash escapes, printable
+// ASCII passes through, and everything else becomes a 3-digit octal escape.
+func writeEscapedBytes(buf *bytes.Buffer, b []byte) {
+	for _, c := range b {
+		switch c {
+		case '\n':
+			buf.WriteString("\\n")
+		case '\r':
+			buf.WriteString("\\r")
+		case '\t':
+			buf.WriteString("\\t")
+		case '"':
+			buf.WriteString("\\\"")
+		case '\'':
+			buf.WriteString("\\'")
+		case '\\':
+			buf.WriteString("\\\\")
+		default:
+			// Simple printable characters are emitted as-is. (The extra
+			// c != '"' && c != '\\' checks previously here were dead code:
+			// both bytes are consumed by the explicit cases above and can
+			// never reach this branch.)
+			if c >= 0x20 && c <= 0x7f {
+				buf.WriteByte(c)
+			} else {
+				// use octal escape for all other values
+				buf.WriteRune('\\')
+				buf.WriteByte('0' + ((c >> 6) & 0x7))
+				buf.WriteByte('0' + ((c >> 3) & 0x7))
+				buf.WriteByte('0' + (c & 0x7))
+			}
+		}
+	}
+}
+
+// basicValidate performs syntax-level validation of the parsed file: all
+// messages, enums, and top-level extension fields are checked for problems
+// detectable without resolving type references. The first error found is
+// returned.
+func basicValidate(res *parseResult) error {
+	fd := res.fd
+	isProto3 := fd.GetSyntax() == "proto3"
+
+	for _, md := range fd.GetMessageType() {
+		if err := validateMessage(res, isProto3, "", md); err != nil {
+			return err
+		}
+	}
+	for _, ed := range fd.GetEnumType() {
+		if err := validateEnum(res, isProto3, "", ed); err != nil {
+			return err
+		}
+	}
+	for _, fld := range fd.GetExtension() {
+		if err := validateField(res, isProto3, "", fld); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// validateMessage checks a message descriptor (and, recursively, its nested
+// elements) for syntax-level problems: proto3 restrictions, a misused
+// map_entry option, overlapping reserved/extension ranges, and fields that
+// collide with reserved names/ranges, extension ranges, or each other.
+// prefix is the dotted path of enclosing messages, used in error messages.
+func validateMessage(res *parseResult, isProto3 bool, prefix string, md *dpb.DescriptorProto) error {
+	nextPrefix := md.GetName() + "."
+
+	for _, fld := range md.Field {
+		if err := validateField(res, isProto3, nextPrefix, fld); err != nil {
+			return err
+		}
+	}
+	for _, fld := range md.Extension {
+		if err := validateField(res, isProto3, nextPrefix, fld); err != nil {
+			return err
+		}
+	}
+	for _, ed := range md.EnumType {
+		if err := validateEnum(res, isProto3, nextPrefix, ed); err != nil {
+			return err
+		}
+	}
+	for _, nmd := range md.NestedType {
+		if err := validateMessage(res, isProto3, nextPrefix, nmd); err != nil {
+			return err
+		}
+	}
+
+	scope := fmt.Sprintf("message %s%s", prefix, md.GetName())
+
+	if isProto3 && len(md.ExtensionRange) > 0 {
+		n := res.getExtensionRangeNode(md.ExtensionRange[0])
+		return ErrorWithSourcePos{Pos: n.start(), Underlying: fmt.Errorf("%s: extension ranges are not allowed in proto3", scope)}
+	}
+
+	// map_entry may not be set explicitly to true (users must use map
+	// syntax); an explicit false is honored and the option is consumed.
+	if index, err := findOption(res, scope, md.Options.GetUninterpretedOption(), "map_entry"); err != nil {
+		return err
+	} else if index >= 0 {
+		opt := md.Options.UninterpretedOption[index]
+		optn := res.getOptionNode(opt)
+		md.Options.UninterpretedOption = removeOption(md.Options.UninterpretedOption, index)
+		valid := false
+		if opt.IdentifierValue != nil {
+			if opt.GetIdentifierValue() == "true" {
+				return ErrorWithSourcePos{Pos: optn.getValue().start(), Underlying: fmt.Errorf("%s: map_entry option should not be set explicitly; use map type instead", scope)}
+			} else if opt.GetIdentifierValue() == "false" {
+				md.Options.MapEntry = proto.Bool(false)
+				valid = true
+			}
+		}
+		if !valid {
+			return ErrorWithSourcePos{Pos: optn.getValue().start(), Underlying: fmt.Errorf("%s: expecting bool value for map_entry option", scope)}
+		}
+	}
+
+	// reserved ranges should not overlap
+	// (range Ends here are exclusive, per DescriptorProto semantics, which
+	// is why overlap tests use < and error messages print end-1)
+	rsvd := make(tagRanges, len(md.ReservedRange))
+	for i, r := range md.ReservedRange {
+		n := res.getMessageReservedRangeNode(r)
+		rsvd[i] = tagRange{start: r.GetStart(), end: r.GetEnd(), node: n}
+
+	}
+	sort.Sort(rsvd)
+	for i := 1; i < len(rsvd); i++ {
+		if rsvd[i].start < rsvd[i-1].end {
+			return ErrorWithSourcePos{Pos: rsvd[i].node.start(), Underlying: fmt.Errorf("%s: reserved ranges overlap: %d to %d and %d to %d", scope, rsvd[i-1].start, rsvd[i-1].end-1, rsvd[i].start, rsvd[i].end-1)}
+		}
+	}
+
+	// extensions ranges should not overlap
+	exts := make(tagRanges, len(md.ExtensionRange))
+	for i, r := range md.ExtensionRange {
+		n := res.getExtensionRangeNode(r)
+		exts[i] = tagRange{start: r.GetStart(), end: r.GetEnd(), node: n}
+	}
+	sort.Sort(exts)
+	for i := 1; i < len(exts); i++ {
+		if exts[i].start < exts[i-1].end {
+			return ErrorWithSourcePos{Pos: exts[i].node.start(), Underlying: fmt.Errorf("%s: extension ranges overlap: %d to %d and %d to %d", scope, exts[i-1].start, exts[i-1].end-1, exts[i].start, exts[i].end-1)}
+		}
+	}
+
+	// see if any extension range overlaps any reserved range
+	// (merge-style walk over the two sorted range lists)
+	var i, j int // i indexes rsvd; j indexes exts
+	for i < len(rsvd) && j < len(exts) {
+		if rsvd[i].start >= exts[j].start && rsvd[i].start < exts[j].end ||
+			exts[j].start >= rsvd[i].start && exts[j].start < rsvd[i].end {
+
+			var pos *SourcePos
+			// report the error at whichever range starts later (the one
+			// that intrudes into the other)
+			if rsvd[i].start >= exts[j].start && rsvd[i].start < exts[j].end {
+				pos = rsvd[i].node.start()
+			} else {
+				pos = exts[j].node.start()
+			}
+			// ranges overlap
+			return ErrorWithSourcePos{Pos: pos, Underlying: fmt.Errorf("%s: extension range %d to %d overlaps reserved range %d to %d", scope, exts[j].start, exts[j].end-1, rsvd[i].start, rsvd[i].end-1)}
+		}
+		if rsvd[i].start < exts[j].start {
+			i++
+		} else {
+			j++
+		}
+	}
+
+	// now, check that fields don't re-use tags and don't try to use extension
+	// or reserved ranges or reserved names
+	rsvdNames := map[string]struct{}{}
+	for _, n := range md.ReservedName {
+		rsvdNames[n] = struct{}{}
+	}
+	fieldTags := map[int32]string{}
+	for _, fld := range md.Field {
+		fn := res.getFieldNode(fld)
+		if _, ok := rsvdNames[fld.GetName()]; ok {
+			return ErrorWithSourcePos{Pos: fn.fieldName().start(), Underlying: fmt.Errorf("%s: field %s is using a reserved name", scope, fld.GetName())}
+		}
+		if existing := fieldTags[fld.GetNumber()]; existing != "" {
+			return ErrorWithSourcePos{Pos: fn.fieldTag().start(), Underlying: fmt.Errorf("%s: fields %s and %s both have the same tag %d", scope, existing, fld.GetName(), fld.GetNumber())}
+		}
+		fieldTags[fld.GetNumber()] = fld.GetName()
+		// check reserved ranges
+		// (binary search over the sorted ranges for the first range whose
+		// exclusive end is past this field's tag)
+		r := sort.Search(len(rsvd), func(index int) bool { return rsvd[index].end > fld.GetNumber() })
+		if r < len(rsvd) && rsvd[r].start <= fld.GetNumber() {
+			return ErrorWithSourcePos{Pos: fn.fieldTag().start(), Underlying: fmt.Errorf("%s: field %s is using tag %d which is in reserved range %d to %d", scope, fld.GetName(), fld.GetNumber(), rsvd[r].start, rsvd[r].end-1)}
+		}
+		// and check extension ranges
+		e := sort.Search(len(exts), func(index int) bool { return exts[index].end > fld.GetNumber() })
+		if e < len(exts) && exts[e].start <= fld.GetNumber() {
+			return ErrorWithSourcePos{Pos: fn.fieldTag().start(), Underlying: fmt.Errorf("%s: field %s is using tag %d which is in extension range %d to %d", scope, fld.GetName(), fld.GetNumber(), exts[e].start, exts[e].end-1)}
+		}
+	}
+
+	return nil
+}
+
+// validateEnum checks an enum descriptor for syntax-level problems: the
+// allow_alias option, proto3's zero-first-value rule, duplicate numeric
+// values (when aliasing is off), overlapping reserved ranges, and values
+// that collide with reserved names or ranges. Note that enum reserved-range
+// Ends are inclusive (unlike message ranges), so comparisons use <=/>=.
+//
+// Bug fix: the proto3 first-value check indexed ed.Value[0] without
+// verifying the enum has any values, which panicked on an empty enum body.
+// Empty enums are rejected explicitly now (protoc also requires at least
+// one value).
+func validateEnum(res *parseResult, isProto3 bool, prefix string, ed *dpb.EnumDescriptorProto) error {
+	scope := fmt.Sprintf("enum %s%s", prefix, ed.GetName())
+
+	if index, err := findOption(res, scope, ed.Options.GetUninterpretedOption(), "allow_alias"); err != nil {
+		return err
+	} else if index >= 0 {
+		opt := ed.Options.UninterpretedOption[index]
+		ed.Options.UninterpretedOption = removeOption(ed.Options.UninterpretedOption, index)
+		valid := false
+		if opt.IdentifierValue != nil {
+			if opt.GetIdentifierValue() == "true" {
+				ed.Options.AllowAlias = proto.Bool(true)
+				valid = true
+			} else if opt.GetIdentifierValue() == "false" {
+				ed.Options.AllowAlias = proto.Bool(false)
+				valid = true
+			}
+		}
+		if !valid {
+			optNode := res.getOptionNode(opt)
+			return ErrorWithSourcePos{Pos: optNode.getValue().start(), Underlying: fmt.Errorf("%s: expecting bool value for allow_alias option", scope)}
+		}
+	}
+
+	// Guard before indexing ed.Value[0] below; an enum with no values is
+	// invalid anyway. (No AST node is tracked for the enum here, so the
+	// error carries only the filename.)
+	if len(ed.Value) == 0 {
+		return ErrorWithSourcePos{Pos: unknownPos(res.fd.GetName()), Underlying: fmt.Errorf("%s: enums must define at least one value", scope)}
+	}
+
+	if isProto3 && ed.Value[0].GetNumber() != 0 {
+		evNode := res.getEnumValueNode(ed.Value[0])
+		return ErrorWithSourcePos{Pos: evNode.getNumber().start(), Underlying: fmt.Errorf("%s: proto3 requires that first value in enum have numeric value of 0", scope)}
+	}
+
+	if !ed.Options.GetAllowAlias() {
+		// make sure all value numbers are distinct
+		vals := map[int32]string{}
+		for _, evd := range ed.Value {
+			if existing := vals[evd.GetNumber()]; existing != "" {
+				evNode := res.getEnumValueNode(evd)
+				return ErrorWithSourcePos{Pos: evNode.getNumber().start(), Underlying: fmt.Errorf("%s: values %s and %s both have the same numeric value %d; use allow_alias option if intentional", scope, existing, evd.GetName(), evd.GetNumber())}
+			}
+			vals[evd.GetNumber()] = evd.GetName()
+		}
+	}
+
+	// reserved ranges should not overlap
+	rsvd := make(tagRanges, len(ed.ReservedRange))
+	for i, r := range ed.ReservedRange {
+		n := res.getEnumReservedRangeNode(r)
+		rsvd[i] = tagRange{start: r.GetStart(), end: r.GetEnd(), node: n}
+	}
+	sort.Sort(rsvd)
+	for i := 1; i < len(rsvd); i++ {
+		if rsvd[i].start <= rsvd[i-1].end {
+			return ErrorWithSourcePos{Pos: rsvd[i].node.start(), Underlying: fmt.Errorf("%s: reserved ranges overlap: %d to %d and %d to %d", scope, rsvd[i-1].start, rsvd[i-1].end, rsvd[i].start, rsvd[i].end)}
+		}
+	}
+
+	// now, check that values don't use reserved names or numbers in
+	// reserved ranges
+	rsvdNames := map[string]struct{}{}
+	for _, n := range ed.ReservedName {
+		rsvdNames[n] = struct{}{}
+	}
+	for _, ev := range ed.Value {
+		evn := res.getEnumValueNode(ev)
+		if _, ok := rsvdNames[ev.GetName()]; ok {
+			return ErrorWithSourcePos{Pos: evn.getName().start(), Underlying: fmt.Errorf("%s: value %s is using a reserved name", scope, ev.GetName())}
+		}
+		// check reserved ranges
+		r := sort.Search(len(rsvd), func(index int) bool { return rsvd[index].end >= ev.GetNumber() })
+		if r < len(rsvd) && rsvd[r].start <= ev.GetNumber() {
+			return ErrorWithSourcePos{Pos: evn.getNumber().start(), Underlying: fmt.Errorf("%s: value %s is using number %d which is in reserved range %d to %d", scope, ev.GetName(), ev.GetNumber(), rsvd[r].start, rsvd[r].end)}
+		}
+	}
+
+	return nil
+}
+
+// validateField checks a single field (or extension field) descriptor for
+// syntax-level problems. In proto3 it rejects groups, explicit non-repeated
+// labels, and default values; in proto2 it requires a label on non-oneof
+// fields and rejects 'required' on extensions. Any field still lacking a
+// label on success is normalized to optional.
+func validateField(res *parseResult, isProto3 bool, prefix string, fld *dpb.FieldDescriptorProto) error {
+	scope := fmt.Sprintf("field %s%s", prefix, fld.GetName())
+
+	node := res.getFieldNode(fld)
+	if isProto3 {
+		if fld.GetType() == dpb.FieldDescriptorProto_TYPE_GROUP {
+			// group fields are always recorded with a *groupNode (see
+			// asGroupDescriptors), so this assertion is safe
+			n := node.(*groupNode)
+			return ErrorWithSourcePos{Pos: n.groupKeyword.start(), Underlying: fmt.Errorf("%s: groups are not allowed in proto3", scope)}
+		}
+		if fld.Label != nil && fld.GetLabel() != dpb.FieldDescriptorProto_LABEL_REPEATED {
+			return ErrorWithSourcePos{Pos: node.fieldLabel().start(), Underlying: fmt.Errorf("%s: field has label %v, but proto3 should omit labels other than 'repeated'", scope, fld.GetLabel())}
+		}
+		if index, err := findOption(res, scope, fld.Options.GetUninterpretedOption(), "default"); err != nil {
+			return err
+		} else if index >= 0 {
+			optNode := res.getOptionNode(fld.Options.GetUninterpretedOption()[index])
+			return ErrorWithSourcePos{Pos: optNode.getName().start(), Underlying: fmt.Errorf("%s: default values are not allowed in proto3", scope)}
+		}
+	} else {
+		// oneof members (OneofIndex set) legitimately have no label
+		if fld.Label == nil && fld.OneofIndex == nil {
+			return ErrorWithSourcePos{Pos: node.fieldName().start(), Underlying: fmt.Errorf("%s: field has no label, but proto2 must indicate 'optional' or 'required'", scope)}
+		}
+		if fld.GetExtendee() != "" && fld.Label != nil && fld.GetLabel() == dpb.FieldDescriptorProto_LABEL_REQUIRED {
+			return ErrorWithSourcePos{Pos: node.fieldLabel().start(), Underlying: fmt.Errorf("%s: extension fields cannot be 'required'", scope)}
+		}
+	}
+
+	// finally, set any missing label to optional
+	if fld.Label == nil {
+		fld.Label = dpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum()
+	}
+	return nil
+}
+
+// findOption scans the given uninterpreted options for a simple
+// (non-extension, single-part) option with the given name. It returns the
+// index of the sole match, -1 when absent, or an error (with source
+// position) when the option appears more than once.
+func findOption(res *parseResult, scope string, opts []*dpb.UninterpretedOption, name string) (int, error) {
+	found := -1
+	for i, opt := range opts {
+		matches := len(opt.Name) == 1 && !opt.Name[0].GetIsExtension() && opt.Name[0].GetNamePart() == name
+		if !matches {
+			continue
+		}
+		if found >= 0 {
+			optNode := res.getOptionNode(opt)
+			return -1, ErrorWithSourcePos{Pos: optNode.getName().start(), Underlying: fmt.Errorf("%s: option %s cannot be defined more than once", scope, name)}
+		}
+		found = i
+	}
+	return found, nil
+}
+
+// removeOption returns the slice with the element at indexToRemove dropped.
+// First/last elements are removed by re-slicing; for an interior element the
+// remainder is shifted down, mutating uo's backing array (callers reassign
+// the result, so this is safe here).
+func removeOption(uo []*dpb.UninterpretedOption, indexToRemove int) []*dpb.UninterpretedOption {
+	switch {
+	case indexToRemove == 0:
+		return uo[1:]
+	case indexToRemove == len(uo)-1:
+		return uo[:len(uo)-1]
+	default:
+		return append(uo[:indexToRemove], uo[indexToRemove+1:]...)
+	}
+}
+
+// tagRange is a numeric tag range paired with the AST node it came from, so
+// range-conflict errors can report a source position. End semantics follow
+// the originating descriptor (exclusive for messages, inclusive for enums).
+type tagRange struct {
+	start int32
+	end   int32
+	node  rangeDecl
+}
+
+// tagRanges implements sort.Interface, ordering ranges by start and then by
+// end, so overlap checks can compare only adjacent elements.
+type tagRanges []tagRange
+
+func (r tagRanges) Len() int {
+	return len(r)
+}
+
+func (r tagRanges) Less(i, j int) bool {
+	return r[i].start < r[j].start ||
+		(r[i].start == r[j].start && r[i].end < r[j].end)
+}
+
+func (r tagRanges) Swap(i, j int) {
+	r[i], r[j] = r[j], r[i]
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y b/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y
new file mode 100644
index 0000000..faf49d9
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y
@@ -0,0 +1,937 @@
+%{
+package protoparse
+
+//lint:file-ignore SA4006 generated parser has unused values
+
+import (
+	"fmt"
+	"math"
+	"unicode"
+
+	"github.com/jhump/protoreflect/desc/internal"
+)
+
+%}
+
+// fields inside this union end up as the fields in a structure known
+// as ${PREFIX}SymType, of which a reference is passed to the lexer.
+%union{
+	file      *fileNode
+	fileDecls []*fileElement
+	syn       *syntaxNode
+	pkg       *packageNode
+	imprt     *importNode
+	msg       *messageNode
+	msgDecls  []*messageElement
+	fld       *fieldNode
+	mapFld    *mapFieldNode
+	grp       *groupNode
+	oo        *oneOfNode
+	ooDecls   []*oneOfElement
+	ext       *extensionRangeNode
+	resvd     *reservedNode
+	en        *enumNode
+	enDecls   []*enumElement
+	env       *enumValueNode
+	extend    *extendNode
+	extDecls  []*extendElement
+	svc       *serviceNode
+	svcDecls  []*serviceElement
+	mtd       *methodNode
+	rpcType   *rpcTypeNode
+	opts      []*optionNode
+	optNm     []*optionNamePartNode
+	rngs      []*rangeNode
+	names     []*stringLiteralNode
+	sl        []valueNode
+	agg       []*aggregateEntryNode
+	aggName   *aggregateNameNode
+	v         valueNode
+	str       *stringLiteralNode
+	i         *negativeIntLiteralNode
+	ui        *intLiteralNode
+	f         *floatLiteralNode
+	id        *identNode
+	b         *basicNode
+	err       error
+}
+
+// any non-terminal which returns a value needs a type, which is
+// really a field name in the above union struct
+%type <file>      file
+%type <syn>       syntax
+%type <fileDecls> fileDecl fileDecls
+%type <imprt>     import
+%type <pkg>       package
+%type <opts>      option fieldOption fieldOptions rpcOption rpcOptions
+%type <optNm>     optionName optionNameRest optionNameComponent
+%type <v>         constant scalarConstant aggregate
+%type <id>        name ident typeIdent keyType
+%type <aggName>   aggName
+%type <i>         negIntLit
+%type <ui>        intLit
+%type <f>         floatLit
+%type <sl>        constantList
+%type <agg>       aggFields aggField aggFieldEntry
+%type <fld>       field oneofField
+%type <oo>        oneof
+%type <grp>       group
+%type <mapFld>    mapField
+%type <msg>       message
+%type <msgDecls>  messageItem messageBody
+%type <ooDecls>   oneofItem oneofBody
+%type <names>     fieldNames
+%type <resvd>     msgReserved enumReserved reservedNames
+%type <rngs>      tagRange tagRanges enumRange enumRanges
+%type <ext>       extensions
+%type <en>        enum
+%type <enDecls>   enumItem enumBody
+%type <env>       enumField
+%type <extend>    extend
+%type <extDecls>  extendItem extendBody
+%type <str>       stringLit
+%type <svc>       service
+%type <svcDecls>  serviceItem serviceBody
+%type <mtd>       rpc
+%type <rpcType>   rpcType
+
+// same for terminals
+%token <str> _STRING_LIT
+%token <ui>  _INT_LIT
+%token <f>   _FLOAT_LIT
+%token <id>  _NAME _FQNAME _TYPENAME
+%token <id>  _SYNTAX _IMPORT _WEAK _PUBLIC _PACKAGE _OPTION _TRUE _FALSE _INF _NAN _REPEATED _OPTIONAL _REQUIRED
+%token <id>  _DOUBLE _FLOAT _INT32 _INT64 _UINT32 _UINT64 _SINT32 _SINT64 _FIXED32 _FIXED64 _SFIXED32 _SFIXED64
+%token <id>  _BOOL _STRING _BYTES _GROUP _ONEOF _MAP _EXTENSIONS _TO _MAX _RESERVED _ENUM _MESSAGE _EXTEND
+%token <id>  _SERVICE _RPC _STREAM _RETURNS
+%token <err> _ERROR
+// we define all of these, even ones that aren't used, to improve error messages
+// so it shows the unexpected symbol instead of showing "$unk"
+%token <b>   '=' ';' ':' '{' '}' '\\' '/' '?' '.' ',' '>' '<' '+' '-' '(' ')' '[' ']' '*' '&' '^' '%' '$' '#' '@' '!' '~' '`'
+
+%%
+
+file : syntax {
+		$$ = &fileNode{syntax: $1}
+		$$.setRange($1, $1)
+		protolex.(*protoLex).res = $$
+	}
+	| fileDecls  {
+		$$ = &fileNode{decls: $1}
+		if len($1) > 0 {
+			$$.setRange($1[0], $1[len($1)-1])
+		}
+		protolex.(*protoLex).res = $$
+	}
+	| syntax fileDecls {
+		$$ = &fileNode{syntax: $1, decls: $2}
+		var end node
+		if len($2) > 0 {
+			end = $2[len($2)-1]
+		} else {
+			end = $1
+		}
+		$$.setRange($1, end)
+		protolex.(*protoLex).res = $$
+	}
+	| {
+	}
+
+fileDecls : fileDecls fileDecl {
+		$$ = append($1, $2...)
+	}
+	| fileDecl
+
+fileDecl : import {
+		$$ = []*fileElement{{imp: $1}}
+	}
+	| package {
+		$$ = []*fileElement{{pkg: $1}}
+	}
+	| option {
+		$$ = []*fileElement{{option: $1[0]}}
+	}
+	| message {
+		$$ = []*fileElement{{message: $1}}
+	}
+	| enum {
+		$$ = []*fileElement{{enum: $1}}
+	}
+	| extend {
+		$$ = []*fileElement{{extend: $1}}
+	}
+	| service {
+		$$ = []*fileElement{{service: $1}}
+	}
+	| ';' {
+		$$ = []*fileElement{{empty: $1}}
+	}
+
+syntax : _SYNTAX '=' stringLit ';' {
+		if $3.val != "proto2" && $3.val != "proto3" {
+			lexError(protolex, $3.start(), "syntax value must be 'proto2' or 'proto3'")
+		}
+		$$ = &syntaxNode{syntax: $3}
+		$$.setRange($1, $4)
+	}
+
+import : _IMPORT stringLit ';' {
+		$$ = &importNode{ name: $2 }
+		$$.setRange($1, $3)
+	}
+	| _IMPORT _WEAK stringLit ';' {
+		$$ = &importNode{ name: $3, weak: true }
+		$$.setRange($1, $4)
+	}
+	| _IMPORT _PUBLIC stringLit ';' {
+		$$ = &importNode{ name: $3, public: true }
+		$$.setRange($1, $4)
+	}
+
+package : _PACKAGE ident ';' {
+		$$ = &packageNode{name: $2}
+		$$.setRange($1, $3)
+	}
+
+ident : name
+	| _FQNAME
+
+option : _OPTION optionName '=' constant ';' {
+		n := &optionNameNode{parts: $2}
+		n.setRange($2[0], $2[len($2)-1])
+		o := &optionNode{name: n, val: $4}
+		o.setRange($1, $5)
+		$$ = []*optionNode{o}
+	}
+
+optionName : ident {
+		$$ = toNameParts($1, 0)
+	}
+	| '(' typeIdent ')' {
+		p := &optionNamePartNode{text: $2, isExtension: true}
+		p.setRange($1, $3)
+		$$ = []*optionNamePartNode{p}
+	}
+	| '(' typeIdent ')' optionNameRest {
+		p := &optionNamePartNode{text: $2, isExtension: true}
+		p.setRange($1, $3)
+		ps := make([]*optionNamePartNode, 1, len($4)+1)
+		ps[0] = p
+		$$ = append(ps, $4...)
+	}
+
+optionNameRest : optionNameComponent
+	| optionNameComponent optionNameRest {
+		$$ = append($1, $2...)
+	}
+
+optionNameComponent : _TYPENAME {
+		$$ = toNameParts($1, 1 /* exclude leading dot */)
+	}
+	| '.' '(' typeIdent ')' {
+		p := &optionNamePartNode{text: $3, isExtension: true}
+		p.setRange($2, $4)
+		$$ = []*optionNamePartNode{p}
+	}
+
+constant : scalarConstant
+	| aggregate
+
+scalarConstant : stringLit {
+		$$ = $1
+	}
+	| intLit {
+		$$ = $1
+	}
+	| negIntLit {
+		$$ = $1
+	}
+	| floatLit {
+		$$ = $1
+	}
+	| name {
+		if $1.val == "true" {
+			$$ = &boolLiteralNode{basicNode: $1.basicNode, val: true}
+		} else if $1.val == "false" {
+			$$ = &boolLiteralNode{basicNode: $1.basicNode, val: false}
+		} else if $1.val == "inf" {
+			f := &floatLiteralNode{val: math.Inf(1)}
+			f.setRange($1, $1)
+			$$ = f
+		} else if $1.val == "nan" {
+			f := &floatLiteralNode{val: math.NaN()}
+			f.setRange($1, $1)
+			$$ = f
+		} else {
+			$$ = $1
+		}
+	}
+
+intLit : _INT_LIT
+	| '+' _INT_LIT {
+		$$ = $2
+	}
+
+negIntLit : '-' _INT_LIT {
+		if $2.val > math.MaxInt64 + 1 {
+			lexError(protolex, $2.start(), fmt.Sprintf("numeric constant %d would underflow (allowed range is %d to %d)", $2.val, int64(math.MinInt64), int64(math.MaxInt64)))
+		}
+		$$ = &negativeIntLiteralNode{val: -int64($2.val)}
+		$$.setRange($1, $2)
+	}
+
+floatLit : _FLOAT_LIT
+	| '-' _FLOAT_LIT {
+		$$ = &floatLiteralNode{val: -$2.val}
+		$$.setRange($1, $2)
+	}
+	| '+' _FLOAT_LIT {
+		$$ = &floatLiteralNode{val: $2.val}
+		$$.setRange($1, $2)
+	}
+	| '+' _INF {
+		$$ = &floatLiteralNode{val: math.Inf(1)}
+		$$.setRange($1, $2)
+	}
+	| '-' _INF {
+		$$ = &floatLiteralNode{val: math.Inf(-1)}
+		$$.setRange($1, $2)
+	}
+
+stringLit : _STRING_LIT
+    | stringLit _STRING_LIT {
+        $$ = &stringLiteralNode{val: $1.val + $2.val}
+        $$.setRange($1, $2)
+    }
+
+aggregate : '{' aggFields '}' {
+		a := &aggregateLiteralNode{elements: $2}
+		a.setRange($1, $3)
+		$$ = a
+	}
+
+aggFields : aggField
+	| aggFields aggField {
+		$$ = append($1, $2...)
+	}
+	| {
+		$$ = nil
+	}
+
+aggField : aggFieldEntry
+	| aggFieldEntry ',' {
+		$$ = $1
+	}
+	| aggFieldEntry ';' {
+		$$ = $1
+	}
+
+aggFieldEntry : aggName ':' scalarConstant {
+		a := &aggregateEntryNode{name: $1, val: $3}
+		a.setRange($1, $3)
+		$$ = []*aggregateEntryNode{a}
+	}
+	| aggName ':' '[' ']' {
+		s := &sliceLiteralNode{}
+		s.setRange($3, $4)
+		a := &aggregateEntryNode{name: $1, val: s}
+		a.setRange($1, $4)
+		$$ = []*aggregateEntryNode{a}
+	}
+	| aggName ':' '[' constantList ']' {
+		s := &sliceLiteralNode{elements: $4}
+		s.setRange($3, $5)
+		a := &aggregateEntryNode{name: $1, val: s}
+		a.setRange($1, $5)
+		$$ = []*aggregateEntryNode{a}
+	}
+	| aggName ':' aggregate {
+		a := &aggregateEntryNode{name: $1, val: $3}
+		a.setRange($1, $3)
+		$$ = []*aggregateEntryNode{a}
+	}
+	| aggName aggregate {
+		a := &aggregateEntryNode{name: $1, val: $2}
+		a.setRange($1, $2)
+		$$ = []*aggregateEntryNode{a}
+	}
+	| aggName ':' '<' aggFields '>' {
+		s := &aggregateLiteralNode{elements: $4}
+		s.setRange($3, $5)
+		a := &aggregateEntryNode{name: $1, val: s}
+		a.setRange($1, $5)
+		$$ = []*aggregateEntryNode{a}
+	}
+	| aggName '<' aggFields '>' {
+		s := &aggregateLiteralNode{elements: $3}
+		s.setRange($2, $4)
+		a := &aggregateEntryNode{name: $1, val: s}
+		a.setRange($1, $4)
+		$$ = []*aggregateEntryNode{a}
+	}
+
+aggName : name {
+		$$ = &aggregateNameNode{name: $1}
+		$$.setRange($1, $1)
+	}
+	| '[' ident ']' {
+		$$ = &aggregateNameNode{name: $2, isExtension: true}
+		$$.setRange($1, $3)
+	}
+
+constantList : constant {
+		$$ = []valueNode{$1}
+	}
+	| constantList ',' constant {
+		$$ = append($1, $3)
+	}
+	| constantList ';' constant {
+		$$ = append($1, $3)
+	}
+	| '<' aggFields '>' {
+		s := &aggregateLiteralNode{elements: $2}
+		s.setRange($1, $3)
+		$$ = []valueNode{s}
+	}
+	| constantList ','  '<' aggFields '>' {
+		s := &aggregateLiteralNode{elements: $4}
+		s.setRange($3, $5)
+		$$ = append($1, s)
+	}
+	| constantList ';'  '<' aggFields '>' {
+		s := &aggregateLiteralNode{elements: $4}
+		s.setRange($3, $5)
+		$$ = append($1, s)
+	}
+
+typeIdent : ident
+	| _TYPENAME
+
+field : _REQUIRED typeIdent name '=' _INT_LIT ';' {
+		checkTag(protolex, $5.start(), $5.val)
+		lbl := &labelNode{basicNode: $1.basicNode, required: true}
+		$$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5}
+		$$.setRange($1, $6)
+	}
+	| _OPTIONAL typeIdent name '=' _INT_LIT ';' {
+		checkTag(protolex, $5.start(), $5.val)
+		lbl := &labelNode{basicNode: $1.basicNode}
+		$$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5}
+		$$.setRange($1, $6)
+	}
+	| _REPEATED typeIdent name '=' _INT_LIT ';' {
+		checkTag(protolex, $5.start(), $5.val)
+		lbl := &labelNode{basicNode: $1.basicNode, repeated: true}
+		$$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5}
+		$$.setRange($1, $6)
+	}
+	| typeIdent name '=' _INT_LIT ';' {
+		checkTag(protolex, $4.start(), $4.val)
+		$$ = &fieldNode{fldType: $1, name: $2, tag: $4}
+		$$.setRange($1, $5)
+	}
+	| _REQUIRED typeIdent name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkTag(protolex, $5.start(), $5.val)
+		lbl := &labelNode{basicNode: $1.basicNode, required: true}
+		$$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5, options: $7}
+		$$.setRange($1, $9)
+	}
+	| _OPTIONAL typeIdent name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkTag(protolex, $5.start(), $5.val)
+		lbl := &labelNode{basicNode: $1.basicNode}
+		$$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5, options: $7}
+		$$.setRange($1, $9)
+	}
+	| _REPEATED typeIdent name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkTag(protolex, $5.start(), $5.val)
+		lbl := &labelNode{basicNode: $1.basicNode, repeated: true}
+		$$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5, options: $7}
+		$$.setRange($1, $9)
+	}
+	| typeIdent name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkTag(protolex, $4.start(), $4.val)
+		$$ = &fieldNode{fldType: $1, name: $2, tag: $4, options: $6}
+		$$.setRange($1, $8)
+	}
+
+fieldOptions : fieldOptions ',' fieldOption {
+		$$ = append($1, $3...)
+	}
+	| fieldOption
+
+fieldOption: optionName '=' constant {
+		n := &optionNameNode{parts: $1}
+		n.setRange($1[0], $1[len($1)-1])
+		o := &optionNode{name: n, val: $3}
+		o.setRange($1[0], $3)
+		$$ = []*optionNode{o}
+	}
+
+group : _REQUIRED _GROUP name '=' _INT_LIT '{' messageBody '}' {
+		checkTag(protolex, $5.start(), $5.val)
+		if !unicode.IsUpper(rune($3.val[0])) {
+			lexError(protolex, $3.start(), fmt.Sprintf("group %s should have a name that starts with a capital letter", $3.val))
+		}
+		lbl := &labelNode{basicNode: $1.basicNode, required: true}
+		$$ = &groupNode{groupKeyword: $2, label: lbl, name: $3, tag: $5, decls: $7}
+		$$.setRange($1, $8)
+	}
+	| _OPTIONAL _GROUP name '=' _INT_LIT '{' messageBody '}' {
+		checkTag(protolex, $5.start(), $5.val)
+		if !unicode.IsUpper(rune($3.val[0])) {
+			lexError(protolex, $3.start(), fmt.Sprintf("group %s should have a name that starts with a capital letter", $3.val))
+		}
+		lbl := &labelNode{basicNode: $1.basicNode}
+		$$ = &groupNode{groupKeyword: $2, label: lbl, name: $3, tag: $5, decls: $7}
+		$$.setRange($1, $8)
+	}
+	| _REPEATED _GROUP name '=' _INT_LIT '{' messageBody '}' {
+		checkTag(protolex, $5.start(), $5.val)
+		if !unicode.IsUpper(rune($3.val[0])) {
+			lexError(protolex, $3.start(), fmt.Sprintf("group %s should have a name that starts with a capital letter", $3.val))
+		}
+		lbl := &labelNode{basicNode: $1.basicNode, repeated: true}
+		$$ = &groupNode{groupKeyword: $2, label: lbl, name: $3, tag: $5, decls: $7}
+		$$.setRange($1, $8)
+	}
+
+oneof : _ONEOF name '{' oneofBody '}' {
+		c := 0
+		for _, el := range $4 {
+			if el.field != nil {
+				c++
+			}
+		}
+		if c == 0 {
+			lexError(protolex, $1.start(), "oneof must contain at least one field")
+		}
+		$$ = &oneOfNode{name: $2, decls: $4}
+		$$.setRange($1, $5)
+	}
+
+oneofBody : oneofBody oneofItem {
+		$$ = append($1, $2...)
+	}
+	| oneofItem
+	| {
+		$$ = nil
+	}
+
+oneofItem : option {
+		$$ = []*oneOfElement{{option: $1[0]}}
+	}
+	| oneofField {
+		$$ = []*oneOfElement{{field: $1}}
+	}
+	| ';' {
+		$$ = []*oneOfElement{{empty: $1}}
+	}
+
+oneofField : typeIdent name '=' _INT_LIT ';' {
+		checkTag(protolex, $4.start(), $4.val)
+		$$ = &fieldNode{fldType: $1, name: $2, tag: $4}
+		$$.setRange($1, $5)
+	}
+	| typeIdent name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkTag(protolex, $4.start(), $4.val)
+		$$ = &fieldNode{fldType: $1, name: $2, tag: $4, options: $6}
+		$$.setRange($1, $8)
+	}
+
+mapField : _MAP '<' keyType ',' typeIdent '>' name '=' _INT_LIT ';' {
+		checkTag(protolex, $9.start(), $9.val)
+		$$ = &mapFieldNode{mapKeyword: $1, keyType: $3, valueType: $5, name: $7, tag: $9}
+		$$.setRange($1, $10)
+	}
+	| _MAP '<' keyType ',' typeIdent '>' name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkTag(protolex, $9.start(), $9.val)
+		$$ = &mapFieldNode{mapKeyword: $1, keyType: $3, valueType: $5, name: $7, tag: $9, options: $11}
+		$$.setRange($1, $13)
+	}
+
+keyType : _INT32
+	| _INT64
+	| _UINT32
+	| _UINT64
+	| _SINT32
+	| _SINT64
+	| _FIXED32
+	| _FIXED64
+	| _SFIXED32
+	| _SFIXED64
+	| _BOOL
+	| _STRING
+
+extensions : _EXTENSIONS tagRanges ';' {
+		$$ = &extensionRangeNode{ranges: $2}
+		$$.setRange($1, $3)
+	}
+	| _EXTENSIONS tagRanges '[' fieldOptions ']' ';' {
+		$$ = &extensionRangeNode{ranges: $2, options: $4}
+		$$.setRange($1, $6)
+	}
+
+tagRanges : tagRanges ',' tagRange {
+		$$ = append($1, $3...)
+	}
+	| tagRange
+
+tagRange : _INT_LIT {
+		if $1.val > internal.MaxTag {
+			lexError(protolex, $1.start(), fmt.Sprintf("range includes out-of-range tag: %d (should be between 0 and %d)", $1.val, internal.MaxTag))
+		}
+		r := &rangeNode{stNode: $1, enNode: $1, st: int32($1.val), en: int32($1.val)}
+		r.setRange($1, $1)
+		$$ = []*rangeNode{r}
+	}
+	| _INT_LIT _TO _INT_LIT {
+		if $1.val > internal.MaxTag {
+			lexError(protolex, $1.start(), fmt.Sprintf("range start is out-of-range tag: %d (should be between 0 and %d)", $1.val, internal.MaxTag))
+		}
+		if $3.val > internal.MaxTag {
+			lexError(protolex, $3.start(), fmt.Sprintf("range end is out-of-range tag: %d (should be between 0 and %d)", $3.val, internal.MaxTag))
+		}
+		if $1.val > $3.val {
+			lexError(protolex, $1.start(), fmt.Sprintf("range, %d to %d, is invalid: start must be <= end", $1.val, $3.val))
+		}
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: int32($3.val)}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+	| _INT_LIT _TO _MAX {
+		if $1.val > internal.MaxTag {
+			lexError(protolex, $1.start(), fmt.Sprintf("range start is out-of-range tag: %d (should be between 0 and %d)", $1.val, internal.MaxTag))
+		}
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: internal.MaxTag}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+
+enumRanges : enumRanges ',' enumRange {
+		$$ = append($1, $3...)
+	}
+	| enumRange
+
+enumRange : _INT_LIT {
+		checkUint64InInt32Range(protolex, $1.start(), $1.val)
+		r := &rangeNode{stNode: $1, enNode: $1, st: int32($1.val), en: int32($1.val)}
+		r.setRange($1, $1)
+		$$ = []*rangeNode{r}
+	}
+	| negIntLit {
+		checkInt64InInt32Range(protolex, $1.start(), $1.val)
+		r := &rangeNode{stNode: $1, enNode: $1, st: int32($1.val), en: int32($1.val)}
+		r.setRange($1, $1)
+		$$ = []*rangeNode{r}
+	}
+	| _INT_LIT _TO _INT_LIT {
+		checkUint64InInt32Range(protolex, $1.start(), $1.val)
+		checkUint64InInt32Range(protolex, $3.start(), $3.val)
+		if $1.val > $3.val {
+			lexError(protolex, $1.start(), fmt.Sprintf("range, %d to %d, is invalid: start must be <= end", $1.val, $3.val))
+		}
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: int32($3.val)}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+	| negIntLit _TO negIntLit {
+		checkInt64InInt32Range(protolex, $1.start(), $1.val)
+		checkInt64InInt32Range(protolex, $3.start(), $3.val)
+		if $1.val > $3.val {
+			lexError(protolex, $1.start(), fmt.Sprintf("range, %d to %d, is invalid: start must be <= end", $1.val, $3.val))
+		}
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: int32($3.val)}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+	| negIntLit _TO _INT_LIT {
+		checkInt64InInt32Range(protolex, $1.start(), $1.val)
+		checkUint64InInt32Range(protolex, $3.start(), $3.val)
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: int32($3.val)}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+	| _INT_LIT _TO _MAX {
+		checkUint64InInt32Range(protolex, $1.start(), $1.val)
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: math.MaxInt32}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+	| negIntLit _TO _MAX {
+		checkInt64InInt32Range(protolex, $1.start(), $1.val)
+		r := &rangeNode{stNode: $1, enNode: $3, st: int32($1.val), en: math.MaxInt32}
+		r.setRange($1, $3)
+		$$ = []*rangeNode{r}
+	}
+
+msgReserved : _RESERVED tagRanges ';' {
+		$$ = &reservedNode{ranges: $2}
+		$$.setRange($1, $3)
+	}
+	| reservedNames
+
+enumReserved : _RESERVED enumRanges ';' {
+		$$ = &reservedNode{ranges: $2}
+		$$.setRange($1, $3)
+	}
+	| reservedNames
+
+reservedNames : _RESERVED fieldNames ';' {
+		rsvd := map[string]struct{}{}
+		for _, n := range $2 {
+			if _, ok := rsvd[n.val]; ok {
+				lexError(protolex, n.start(), fmt.Sprintf("name %q is reserved multiple times", n.val))
+				break
+			}
+			rsvd[n.val] = struct{}{}
+		}
+		$$ = &reservedNode{names: $2}
+		$$.setRange($1, $3)
+	}
+
+fieldNames : fieldNames ',' stringLit {
+		$$ = append($1, $3)
+	}
+	| stringLit {
+		$$ = []*stringLiteralNode{$1}
+	}
+
+enum : _ENUM name '{' enumBody '}' {
+		c := 0
+		for _, el := range $4 {
+			if el.value != nil {
+				c++
+			}
+		}
+		if c == 0 {
+			lexError(protolex, $1.start(), "enums must define at least one value")
+		}
+		$$ = &enumNode{name: $2, decls: $4}
+		$$.setRange($1, $5)
+	}
+
+enumBody : enumBody enumItem {
+		$$ = append($1, $2...)
+	}
+	| enumItem
+	| {
+		$$ = nil
+	}
+
+enumItem : option {
+		$$ = []*enumElement{{option: $1[0]}}
+	}
+	| enumField {
+		$$ = []*enumElement{{value: $1}}
+	}
+	| enumReserved {
+		$$ = []*enumElement{{reserved: $1}}
+	}
+	| ';' {
+		$$ = []*enumElement{{empty: $1}}
+	}
+
+enumField : name '=' _INT_LIT ';' {
+		checkUint64InInt32Range(protolex, $3.start(), $3.val)
+		$$ = &enumValueNode{name: $1, numberP: $3}
+		$$.setRange($1, $4)
+	}
+	|  name '=' _INT_LIT '[' fieldOptions ']' ';' {
+		checkUint64InInt32Range(protolex, $3.start(), $3.val)
+		$$ = &enumValueNode{name: $1, numberP: $3, options: $5}
+		$$.setRange($1, $7)
+	}
+	| name '=' negIntLit ';' {
+		checkInt64InInt32Range(protolex, $3.start(), $3.val)
+		$$ = &enumValueNode{name: $1, numberN: $3}
+		$$.setRange($1, $4)
+	}
+	|  name '=' negIntLit '[' fieldOptions ']' ';' {
+		checkInt64InInt32Range(protolex, $3.start(), $3.val)
+		$$ = &enumValueNode{name: $1, numberN: $3, options: $5}
+		$$.setRange($1, $7)
+	}
+
+message : _MESSAGE name '{' messageBody '}' {
+		$$ = &messageNode{name: $2, decls: $4}
+		$$.setRange($1, $5)
+	}
+
+messageBody : messageBody messageItem {
+		$$ = append($1, $2...)
+	}
+	| messageItem
+	| {
+		$$ = nil
+	}
+
+messageItem : field {
+		$$ = []*messageElement{{field: $1}}
+	}
+	| enum {
+		$$ = []*messageElement{{enum: $1}}
+	}
+	| message {
+		$$ = []*messageElement{{nested: $1}}
+	}
+	| extend {
+		$$ = []*messageElement{{extend: $1}}
+	}
+	| extensions {
+		$$ = []*messageElement{{extensionRange: $1}}
+	}
+	| group {
+		$$ = []*messageElement{{group: $1}}
+	}
+	| option {
+		$$ = []*messageElement{{option: $1[0]}}
+	}
+	| oneof {
+		$$ = []*messageElement{{oneOf: $1}}
+	}
+	| mapField {
+		$$ = []*messageElement{{mapField: $1}}
+	}
+	| msgReserved {
+		$$ = []*messageElement{{reserved: $1}}
+	}
+	| ';' {
+		$$ = []*messageElement{{empty: $1}}
+	}
+
+extend : _EXTEND typeIdent '{' extendBody '}' {
+		c := 0
+		for _, el := range $4 {
+			if el.field != nil || el.group != nil {
+				c++
+			}
+		}
+		if c == 0 {
+			lexError(protolex, $1.start(), "extend sections must define at least one extension")
+		}
+		$$ = &extendNode{extendee: $2, decls: $4}
+		$$.setRange($1, $5)
+	}
+
+extendBody : extendBody extendItem {
+		$$ = append($1, $2...)
+	}
+	| extendItem
+	| {
+		$$ = nil
+	}
+
+extendItem : field {
+		$$ = []*extendElement{{field: $1}}
+	}
+	| group {
+		$$ = []*extendElement{{group: $1}}
+	}
+	| ';' {
+		$$ = []*extendElement{{empty: $1}}
+	}
+
+service : _SERVICE name '{' serviceBody '}' {
+		$$ = &serviceNode{name: $2, decls: $4}
+		$$.setRange($1, $5)
+	}
+
+serviceBody : serviceBody serviceItem {
+		$$ = append($1, $2...)
+	}
+	| serviceItem
+	| {
+		$$ = nil
+	}
+
+// NB: doc suggests support for "stream" declaration, separate from "rpc", but
+// it does not appear to be supported in protoc (doc is likely from grammar for
+// Google-internal version of protoc, with support for streaming stubby)
+serviceItem : option {
+		$$ = []*serviceElement{{option: $1[0]}}
+	}
+	| rpc {
+		$$ = []*serviceElement{{rpc: $1}}
+	}
+	| ';' {
+		$$ = []*serviceElement{{empty: $1}}
+	}
+
+rpc : _RPC name '(' rpcType ')' _RETURNS '(' rpcType ')' ';' {
+		$$ = &methodNode{name: $2, input: $4, output: $8}
+		$$.setRange($1, $10)
+	}
+	| _RPC name '(' rpcType ')' _RETURNS '(' rpcType ')' '{' rpcOptions '}' {
+		$$ = &methodNode{name: $2, input: $4, output: $8, options: $11}
+		$$.setRange($1, $12)
+	}
+
+rpcType : _STREAM typeIdent {
+		$$ = &rpcTypeNode{msgType: $2, streamKeyword: $1}
+		$$.setRange($1, $2)
+	}
+	| typeIdent {
+		$$ = &rpcTypeNode{msgType: $1}
+		$$.setRange($1, $1)
+	}
+
+rpcOptions : rpcOptions rpcOption {
+		$$ = append($1, $2...)
+	}
+	| rpcOption
+	| {
+		$$ = []*optionNode{}
+	}
+
+rpcOption : option {
+		$$ = $1
+	}
+	| ';' {
+		$$ = []*optionNode{}
+	}
+
+name : _NAME
+	| _SYNTAX
+	| _IMPORT
+	| _WEAK
+	| _PUBLIC
+	| _PACKAGE
+	| _OPTION
+	| _TRUE
+	| _FALSE
+	| _INF
+	| _NAN
+	| _REPEATED
+	| _OPTIONAL
+	| _REQUIRED
+	| _DOUBLE
+	| _FLOAT
+	| _INT32
+	| _INT64
+	| _UINT32
+	| _UINT64
+	| _SINT32
+	| _SINT64
+	| _FIXED32
+	| _FIXED64
+	| _SFIXED32
+	| _SFIXED64
+	| _BOOL
+	| _STRING
+	| _BYTES
+	| _GROUP
+	| _ONEOF
+	| _MAP
+	| _EXTENSIONS
+	| _TO
+	| _MAX
+	| _RESERVED
+	| _ENUM
+	| _MESSAGE
+	| _EXTEND
+	| _SERVICE
+	| _RPC
+	| _STREAM
+	| _RETURNS
+
+%%
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y.go
new file mode 100644
index 0000000..6b8a4e6
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y.go
@@ -0,0 +1,2093 @@
+// Code generated by goyacc -o proto.y.go -p proto proto.y. DO NOT EDIT.
+
+//line proto.y:2
+package protoparse
+
+import __yyfmt__ "fmt"
+
+//line proto.y:2
+
+//lint:file-ignore SA4006 generated parser has unused values
+
+import (
+	"fmt"
+	"math"
+	"unicode"
+
+	"github.com/jhump/protoreflect/desc/internal"
+)
+
+//line proto.y:18
+type protoSymType struct {
+	yys       int
+	file      *fileNode
+	fileDecls []*fileElement
+	syn       *syntaxNode
+	pkg       *packageNode
+	imprt     *importNode
+	msg       *messageNode
+	msgDecls  []*messageElement
+	fld       *fieldNode
+	mapFld    *mapFieldNode
+	grp       *groupNode
+	oo        *oneOfNode
+	ooDecls   []*oneOfElement
+	ext       *extensionRangeNode
+	resvd     *reservedNode
+	en        *enumNode
+	enDecls   []*enumElement
+	env       *enumValueNode
+	extend    *extendNode
+	extDecls  []*extendElement
+	svc       *serviceNode
+	svcDecls  []*serviceElement
+	mtd       *methodNode
+	rpcType   *rpcTypeNode
+	opts      []*optionNode
+	optNm     []*optionNamePartNode
+	rngs      []*rangeNode
+	names     []*stringLiteralNode
+	sl        []valueNode
+	agg       []*aggregateEntryNode
+	aggName   *aggregateNameNode
+	v         valueNode
+	str       *stringLiteralNode
+	i         *negativeIntLiteralNode
+	ui        *intLiteralNode
+	f         *floatLiteralNode
+	id        *identNode
+	b         *basicNode
+	err       error
+}
+
+const _STRING_LIT = 57346
+const _INT_LIT = 57347
+const _FLOAT_LIT = 57348
+const _NAME = 57349
+const _FQNAME = 57350
+const _TYPENAME = 57351
+const _SYNTAX = 57352
+const _IMPORT = 57353
+const _WEAK = 57354
+const _PUBLIC = 57355
+const _PACKAGE = 57356
+const _OPTION = 57357
+const _TRUE = 57358
+const _FALSE = 57359
+const _INF = 57360
+const _NAN = 57361
+const _REPEATED = 57362
+const _OPTIONAL = 57363
+const _REQUIRED = 57364
+const _DOUBLE = 57365
+const _FLOAT = 57366
+const _INT32 = 57367
+const _INT64 = 57368
+const _UINT32 = 57369
+const _UINT64 = 57370
+const _SINT32 = 57371
+const _SINT64 = 57372
+const _FIXED32 = 57373
+const _FIXED64 = 57374
+const _SFIXED32 = 57375
+const _SFIXED64 = 57376
+const _BOOL = 57377
+const _STRING = 57378
+const _BYTES = 57379
+const _GROUP = 57380
+const _ONEOF = 57381
+const _MAP = 57382
+const _EXTENSIONS = 57383
+const _TO = 57384
+const _MAX = 57385
+const _RESERVED = 57386
+const _ENUM = 57387
+const _MESSAGE = 57388
+const _EXTEND = 57389
+const _SERVICE = 57390
+const _RPC = 57391
+const _STREAM = 57392
+const _RETURNS = 57393
+const _ERROR = 57394
+
+var protoToknames = [...]string{
+	"$end",
+	"error",
+	"$unk",
+	"_STRING_LIT",
+	"_INT_LIT",
+	"_FLOAT_LIT",
+	"_NAME",
+	"_FQNAME",
+	"_TYPENAME",
+	"_SYNTAX",
+	"_IMPORT",
+	"_WEAK",
+	"_PUBLIC",
+	"_PACKAGE",
+	"_OPTION",
+	"_TRUE",
+	"_FALSE",
+	"_INF",
+	"_NAN",
+	"_REPEATED",
+	"_OPTIONAL",
+	"_REQUIRED",
+	"_DOUBLE",
+	"_FLOAT",
+	"_INT32",
+	"_INT64",
+	"_UINT32",
+	"_UINT64",
+	"_SINT32",
+	"_SINT64",
+	"_FIXED32",
+	"_FIXED64",
+	"_SFIXED32",
+	"_SFIXED64",
+	"_BOOL",
+	"_STRING",
+	"_BYTES",
+	"_GROUP",
+	"_ONEOF",
+	"_MAP",
+	"_EXTENSIONS",
+	"_TO",
+	"_MAX",
+	"_RESERVED",
+	"_ENUM",
+	"_MESSAGE",
+	"_EXTEND",
+	"_SERVICE",
+	"_RPC",
+	"_STREAM",
+	"_RETURNS",
+	"_ERROR",
+	"'='",
+	"';'",
+	"':'",
+	"'{'",
+	"'}'",
+	"'\\\\'",
+	"'/'",
+	"'?'",
+	"'.'",
+	"','",
+	"'>'",
+	"'<'",
+	"'+'",
+	"'-'",
+	"'('",
+	"')'",
+	"'['",
+	"']'",
+	"'*'",
+	"'&'",
+	"'^'",
+	"'%'",
+	"'$'",
+	"'#'",
+	"'@'",
+	"'!'",
+	"'~'",
+	"'`'",
+}
+var protoStatenames = [...]string{}
+
+const protoEofCode = 1
+const protoErrCode = 2
+const protoInitialStackSize = 16
+
+//line proto.y:937
+
+//line yacctab:1
+var protoExca = [...]int{
+	-1, 1,
+	1, -1,
+	-2, 0,
+}
+
+const protoPrivate = 57344
+
+const protoLast = 2050
+
+var protoAct = [...]int{
+
+	120, 8, 288, 8, 8, 386, 264, 80, 128, 113,
+	159, 160, 265, 271, 103, 196, 185, 112, 100, 101,
+	29, 171, 8, 28, 75, 119, 99, 114, 79, 153,
+	137, 148, 266, 184, 24, 139, 306, 255, 77, 78,
+	319, 82, 306, 83, 389, 86, 87, 306, 318, 74,
+	378, 306, 98, 306, 306, 363, 317, 306, 306, 361,
+	306, 359, 351, 222, 379, 338, 337, 366, 307, 328,
+	377, 224, 325, 322, 304, 280, 278, 286, 223, 380,
+	315, 356, 367, 197, 329, 90, 243, 326, 323, 305,
+	281, 279, 297, 140, 111, 154, 27, 197, 249, 214,
+	209, 106, 188, 336, 246, 276, 241, 330, 240, 211,
+	105, 173, 245, 144, 242, 287, 224, 208, 381, 150,
+	382, 149, 176, 146, 327, 207, 324, 163, 16, 226,
+	94, 93, 92, 91, 177, 179, 181, 16, 199, 140,
+	79, 75, 85, 392, 199, 383, 368, 199, 374, 183,
+	78, 77, 373, 154, 16, 187, 191, 372, 199, 144,
+	198, 365, 157, 174, 85, 191, 74, 156, 355, 146,
+	189, 206, 212, 150, 193, 149, 388, 354, 204, 201,
+	163, 210, 203, 14, 333, 158, 15, 16, 157, 85,
+	85, 88, 97, 156, 213, 16, 202, 335, 215, 216,
+	217, 218, 219, 220, 308, 262, 261, 4, 14, 244,
+	260, 15, 16, 376, 96, 259, 258, 18, 17, 19,
+	20, 257, 254, 256, 221, 339, 13, 272, 252, 194,
+	105, 75, 163, 248, 388, 275, 250, 390, 283, 95,
+	84, 267, 18, 17, 19, 20, 89, 23, 247, 225,
+	352, 13, 268, 303, 168, 169, 27, 186, 290, 302,
+	198, 282, 277, 285, 295, 301, 206, 170, 300, 5,
+	299, 272, 105, 22, 163, 163, 284, 117, 11, 275,
+	11, 11, 165, 166, 310, 312, 313, 75, 314, 75,
+	269, 22, 27, 155, 298, 167, 311, 186, 316, 11,
+	25, 26, 263, 168, 293, 320, 85, 206, 27, 152,
+	12, 147, 331, 75, 75, 163, 163, 3, 141, 332,
+	21, 115, 10, 138, 10, 10, 118, 195, 142, 105,
+	345, 75, 206, 347, 75, 123, 349, 75, 190, 105,
+	105, 163, 346, 10, 270, 348, 116, 9, 350, 9,
+	9, 122, 357, 121, 358, 273, 176, 353, 176, 369,
+	176, 334, 163, 161, 163, 290, 292, 104, 9, 206,
+	206, 340, 342, 102, 375, 75, 162, 227, 163, 163,
+	172, 385, 7, 387, 6, 2, 387, 384, 75, 1,
+	0, 391, 27, 107, 110, 31, 0, 0, 32, 33,
+	34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+	64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+	0, 0, 0, 0, 106, 0, 0, 0, 0, 0,
+	0, 0, 294, 108, 109, 0, 0, 0, 291, 27,
+	107, 110, 31, 0, 0, 32, 33, 34, 35, 36,
+	37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
+	47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+	57, 58, 59, 60, 61, 62, 63, 64, 65, 66,
+	67, 68, 69, 70, 71, 72, 73, 0, 0, 0,
+	0, 106, 0, 0, 0, 0, 0, 0, 0, 253,
+	108, 109, 0, 0, 251, 27, 107, 110, 31, 0,
+	0, 32, 33, 34, 35, 36, 37, 38, 39, 40,
+	41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
+	51, 52, 53, 54, 55, 56, 57, 58, 59, 60,
+	61, 62, 63, 64, 65, 66, 67, 68, 69, 70,
+	71, 72, 73, 0, 0, 0, 0, 106, 0, 0,
+	0, 0, 0, 0, 0, 343, 108, 109, 27, 107,
+	110, 31, 0, 0, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	106, 0, 0, 0, 0, 0, 0, 0, 341, 108,
+	109, 27, 107, 110, 31, 0, 0, 32, 33, 34,
+	35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+	45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+	55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+	65, 66, 67, 68, 69, 70, 71, 72, 73, 0,
+	0, 0, 0, 106, 0, 0, 0, 0, 0, 0,
+	0, 31, 108, 109, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 371, 0, 0,
+	0, 31, 0, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 370, 0, 0,
+	0, 31, 0, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 344, 0, 0,
+	0, 31, 0, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 321, 0, 0,
+	0, 31, 0, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 296, 0, 0,
+	0, 31, 0, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 205, 0, 0, 0, 0, 0, 0, 0, 0,
+	0, 31, 0, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 228, 229, 230, 231,
+	232, 233, 234, 235, 236, 237, 238, 239, 0, 0,
+	0, 31, 30, 164, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+	58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+	68, 69, 70, 71, 72, 73, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+	0, 76, 31, 30, 81, 32, 33, 34, 35, 36,
+	133, 38, 39, 40, 41, 127, 126, 125, 45, 46,
+	47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+	57, 58, 59, 60, 134, 135, 132, 64, 65, 136,
+	129, 130, 131, 70, 71, 72, 73, 0, 0, 124,
+	0, 0, 364, 31, 30, 81, 32, 33, 34, 35,
+	36, 133, 38, 39, 40, 41, 127, 126, 125, 45,
+	46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
+	56, 57, 58, 59, 60, 134, 135, 132, 64, 65,
+	136, 129, 130, 131, 70, 71, 72, 73, 0, 0,
+	124, 0, 0, 362, 31, 30, 81, 32, 33, 34,
+	35, 36, 133, 38, 39, 40, 41, 127, 126, 125,
+	45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+	55, 56, 57, 58, 59, 60, 134, 135, 132, 64,
+	65, 136, 129, 130, 131, 70, 71, 72, 73, 0,
+	0, 124, 0, 0, 360, 31, 30, 81, 32, 33,
+	34, 35, 36, 133, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+	64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+	0, 0, 274, 0, 0, 309, 31, 30, 81, 32,
+	33, 34, 35, 36, 37, 38, 39, 40, 41, 127,
+	126, 125, 45, 46, 47, 48, 49, 50, 51, 52,
+	53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
+	63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
+	73, 0, 0, 151, 0, 0, 200, 31, 30, 81,
+	32, 33, 34, 35, 36, 133, 38, 39, 40, 41,
+	127, 126, 125, 45, 46, 47, 48, 49, 50, 51,
+	52, 53, 54, 55, 56, 57, 58, 59, 60, 134,
+	135, 132, 64, 65, 136, 129, 130, 131, 70, 71,
+	72, 73, 0, 0, 124, 31, 0, 175, 32, 33,
+	34, 35, 36, 133, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+	64, 65, 145, 67, 68, 69, 70, 71, 72, 73,
+	0, 0, 143, 0, 0, 192, 31, 30, 81, 32,
+	33, 34, 35, 36, 133, 38, 39, 40, 41, 127,
+	126, 125, 45, 46, 47, 48, 49, 50, 51, 52,
+	53, 54, 55, 56, 57, 58, 59, 60, 134, 135,
+	132, 64, 65, 136, 129, 130, 131, 70, 71, 72,
+	73, 0, 0, 124, 31, 30, 81, 32, 33, 34,
+	35, 36, 133, 38, 39, 40, 41, 42, 43, 44,
+	45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+	55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+	65, 66, 67, 68, 69, 70, 71, 72, 73, 0,
+	0, 274, 31, 30, 81, 32, 33, 34, 35, 36,
+	37, 38, 39, 40, 41, 127, 126, 125, 45, 46,
+	47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+	57, 58, 59, 60, 61, 62, 63, 64, 65, 66,
+	67, 68, 69, 70, 71, 72, 73, 31, 0, 151,
+	32, 33, 34, 35, 36, 133, 38, 39, 40, 41,
+	42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+	52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
+	62, 63, 64, 65, 145, 67, 68, 69, 70, 71,
+	72, 73, 0, 0, 143, 31, 30, 81, 32, 33,
+	34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+	64, 65, 66, 67, 68, 69, 70, 71, 289, 73,
+	31, 30, 81, 32, 33, 34, 35, 36, 37, 38,
+	39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+	49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
+	59, 60, 61, 62, 63, 64, 65, 66, 67, 68,
+	69, 70, 71, 72, 73, 31, 30, 81, 32, 33,
+	34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 182, 61, 62, 63,
+	64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+	31, 30, 81, 32, 33, 34, 35, 36, 37, 38,
+	39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+	49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
+	59, 180, 61, 62, 63, 64, 65, 66, 67, 68,
+	69, 70, 71, 72, 73, 31, 30, 81, 32, 33,
+	34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 178, 61, 62, 63,
+	64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+	31, 30, 0, 32, 33, 34, 35, 36, 37, 38,
+	39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+	49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
+	59, 60, 61, 62, 63, 64, 65, 66, 67, 68,
+	69, 70, 71, 72, 73, 31, 0, 0, 32, 33,
+	34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+	44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+	54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+	64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+}
+var protoPact = [...]int{
+
+	197, -1000, 172, 172, 194, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, 288, 1953, 1124, 1998, 1998, 1773,
+	1998, 172, -1000, 304, 186, 304, 304, -1000, 137, -1000,
+	-1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, 193, -1000, 1773, 77, 76, 75,
+	-1000, -1000, 74, 185, -1000, -1000, 160, 138, -1000, 647,
+	26, 1539, 1680, 1635, 113, -1000, -1000, -1000, 131, -1000,
+	-1000, 302, -1000, -1000, -1000, -1000, 1064, -1000, 277, 249,
+	-1000, 102, 1440, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, -1000, 1908, 1863, 1818, 1998, 1998,
+	1998, 1773, 292, 1124, 1998, 38, 252, -1000, 1488, -1000,
+	-1000, -1000, -1000, -1000, 176, 92, -1000, 1389, -1000, -1000,
+	-1000, -1000, 139, -1000, -1000, -1000, -1000, 1998, -1000, 1004,
+	-1000, 63, 45, -1000, 1953, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, 102, -1000, 32, -1000, -1000, 1998, 1998, 1998,
+	1998, 1998, 1998, 171, 9, -1000, 207, 73, 1091, 54,
+	52, 302, -1000, -1000, 81, 50, -1000, 206, 191, 298,
+	-1000, -1000, -1000, -1000, 31, -1000, -1000, -1000, -1000, 455,
+	-1000, 1064, -33, -1000, 1773, 168, 163, 162, 157, 153,
+	152, 297, -1000, 1124, 292, 247, 1587, 43, -1000, -1000,
+	-1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+	-1000, -1000, 304, 22, 21, 78, -1000, 233, 72, 1728,
+	-1000, 388, -1000, 1064, 944, -1000, 24, 289, 265, 263,
+	260, 254, 248, 20, -2, -1000, 151, -1000, -1000, -1000,
+	1338, -1000, -1000, -1000, -1000, 1998, 1773, 302, -1000, 1124,
+	-1000, 1124, -1000, -1000, -1000, -1000, -1000, -1000, 12, 1773,
+	-1000, -1000, -14, -1000, 1064, 884, -1000, -1000, 19, 70,
+	18, 68, 15, 51, -1000, 1124, 1124, 130, 647, -1000,
+	-1000, 144, 40, -4, -5, 174, -1000, -1000, 584, 521,
+	824, -1000, -1000, 1124, 1539, -1000, 1124, 1539, -1000, 1124,
+	1539, -8, -1000, -1000, -1000, 245, 1998, 123, 114, 14,
+	-1000, 1064, -1000, 1064, -1000, -9, 1287, -11, 1236, -15,
+	1185, 107, 13, 93, -1000, -1000, 1728, 764, 704, 103,
+	-1000, 98, -1000, 94, -1000, -1000, -1000, 1124, 208, 2,
+	-1000, -1000, -1000, -1000, -1000, -20, 10, 64, 91, -1000,
+	1124, -1000, 122, -1000, -26, 180, -1000, -1000, -1000, 89,
+	-1000, -1000, -1000,
+}
+var protoPgo = [...]int{
+
+	0, 389, 385, 269, 317, 384, 382, 0, 12, 6,
+	5, 381, 32, 21, 380, 52, 26, 18, 20, 7,
+	8, 377, 376, 14, 373, 367, 366, 10, 11, 363,
+	27, 355, 353, 25, 351, 346, 9, 17, 13, 344,
+	338, 335, 328, 30, 16, 33, 15, 327, 326, 321,
+	35, 323, 318, 277, 31, 311, 19, 310, 29, 309,
+	293, 2,
+}
+var protoR1 = [...]int{
+
+	0, 1, 1, 1, 1, 4, 4, 3, 3, 3,
+	3, 3, 3, 3, 3, 2, 5, 5, 5, 6,
+	19, 19, 7, 12, 12, 12, 13, 13, 14, 14,
+	15, 15, 16, 16, 16, 16, 16, 24, 24, 23,
+	25, 25, 25, 25, 25, 56, 56, 17, 27, 27,
+	27, 28, 28, 28, 29, 29, 29, 29, 29, 29,
+	29, 22, 22, 26, 26, 26, 26, 26, 26, 20,
+	20, 30, 30, 30, 30, 30, 30, 30, 30, 9,
+	9, 8, 33, 33, 33, 32, 39, 39, 39, 38,
+	38, 38, 31, 31, 34, 34, 21, 21, 21, 21,
+	21, 21, 21, 21, 21, 21, 21, 21, 48, 48,
+	45, 45, 44, 44, 44, 47, 47, 46, 46, 46,
+	46, 46, 46, 46, 41, 41, 42, 42, 43, 40,
+	40, 49, 51, 51, 51, 50, 50, 50, 50, 52,
+	52, 52, 52, 35, 37, 37, 37, 36, 36, 36,
+	36, 36, 36, 36, 36, 36, 36, 36, 53, 55,
+	55, 55, 54, 54, 54, 57, 59, 59, 59, 58,
+	58, 58, 60, 60, 61, 61, 11, 11, 11, 10,
+	10, 18, 18, 18, 18, 18, 18, 18, 18, 18,
+	18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
+	18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
+	18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
+	18, 18, 18, 18,
+}
+var protoR2 = [...]int{
+
+	0, 1, 1, 2, 0, 2, 1, 1, 1, 1,
+	1, 1, 1, 1, 1, 4, 3, 4, 4, 3,
+	1, 1, 5, 1, 3, 4, 1, 2, 1, 4,
+	1, 1, 1, 1, 1, 1, 1, 1, 2, 2,
+	1, 2, 2, 2, 2, 1, 2, 3, 1, 2,
+	0, 1, 2, 2, 3, 4, 5, 3, 2, 5,
+	4, 1, 3, 1, 3, 3, 3, 5, 5, 1,
+	1, 6, 6, 6, 5, 9, 9, 9, 8, 3,
+	1, 3, 8, 8, 8, 5, 2, 1, 0, 1,
+	1, 1, 5, 8, 10, 13, 1, 1, 1, 1,
+	1, 1, 1, 1, 1, 1, 1, 1, 3, 6,
+	3, 1, 1, 3, 3, 3, 1, 1, 1, 3,
+	3, 3, 3, 3, 3, 1, 3, 1, 3, 3,
+	1, 5, 2, 1, 0, 1, 1, 1, 1, 4,
+	7, 4, 7, 5, 2, 1, 0, 1, 1, 1,
+	1, 1, 1, 1, 1, 1, 1, 1, 5, 2,
+	1, 0, 1, 1, 1, 5, 2, 1, 0, 1,
+	1, 1, 10, 12, 2, 1, 2, 1, 0, 1,
+	1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+	1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+	1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+	1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+	1, 1, 1, 1,
+}
+var protoChk = [...]int{
+
+	-1000, -1, -2, -4, 10, -3, -5, -6, -7, -35,
+	-49, -53, -57, 54, 11, 14, 15, 46, 45, 47,
+	48, -4, -3, 53, -56, 12, 13, 4, -19, -18,
+	8, 7, 10, 11, 12, 13, 14, 15, 16, 17,
+	18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+	28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+	38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+	48, 49, 50, 51, -12, -19, 67, -18, -18, -20,
+	-19, 9, -18, -56, 54, 4, -56, -56, 54, 53,
+	-20, 56, 56, 56, 56, 54, 54, 54, -15, -16,
+	-17, -56, -24, -23, -25, -18, 56, 5, 65, 66,
+	6, 68, -37, -36, -30, -49, -35, -53, -48, -33,
+	-7, -32, -34, -41, 54, 22, 21, 20, -20, 45,
+	46, 47, 41, 15, 39, 40, 44, -43, -51, -50,
+	-7, -52, -42, 54, -18, 44, -43, -55, -54, -30,
+	-33, 54, -59, -58, -7, -60, 54, 49, 54, -27,
+	-28, -29, -22, -18, 69, 5, 6, 18, 5, 6,
+	18, -13, -14, 9, 61, 57, -36, -20, 38, -20,
+	38, -20, 38, -18, -45, -44, 5, -18, 64, -45,
+	-40, -56, 57, -50, 53, -47, -46, 5, -23, 66,
+	57, -54, 57, -58, -18, 57, -28, 62, 54, 55,
+	-17, 64, -19, -13, 67, -18, -18, -18, -18, -18,
+	-18, 53, 54, 69, 62, 42, 56, -21, 25, 26,
+	27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
+	54, 54, 62, 5, -23, 62, 54, 42, 42, 67,
+	-16, 69, -17, 64, -27, 70, -20, 53, 53, 53,
+	53, 53, 53, 5, -9, -8, -12, -44, 5, 43,
+	-39, -38, -7, -31, 54, -20, 62, -56, 54, 69,
+	54, 69, -46, 5, 43, -23, 5, 43, -61, 50,
+	-20, 70, -26, -15, 64, -27, 63, 68, 5, 5,
+	5, 5, 5, 5, 54, 69, 62, 70, 53, 57,
+	-38, -18, -20, -9, -9, 68, -20, 70, 62, 54,
+	-27, 63, 54, 69, 56, 54, 69, 56, 54, 69,
+	56, -9, -8, 54, -15, 53, 63, 70, 70, 51,
+	-15, 64, -15, 64, 63, -9, -37, -9, -37, -9,
+	-37, 70, 5, -18, 54, 54, 67, -27, -27, 70,
+	57, 70, 57, 70, 57, 54, 54, 69, 53, -61,
+	63, 63, 54, 54, 54, -9, 5, 68, 70, 54,
+	69, 54, 56, 54, -9, -11, -10, -7, 54, 70,
+	57, -10, 54,
+}
+var protoDef = [...]int{
+
+	4, -2, 1, 2, 0, 6, 7, 8, 9, 10,
+	11, 12, 13, 14, 0, 0, 0, 0, 0, 0,
+	0, 3, 5, 0, 0, 0, 0, 45, 0, 20,
+	21, 181, 182, 183, 184, 185, 186, 187, 188, 189,
+	190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
+	200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
+	210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
+	220, 221, 222, 223, 0, 23, 0, 0, 0, 0,
+	69, 70, 0, 0, 16, 46, 0, 0, 19, 0,
+	0, 146, 134, 161, 168, 15, 17, 18, 0, 30,
+	31, 32, 33, 34, 35, 36, 50, 37, 0, 0,
+	40, 24, 0, 145, 147, 148, 149, 150, 151, 152,
+	153, 154, 155, 156, 157, 0, 0, 0, 0, 0,
+	0, 0, 213, 187, 0, 212, 216, 125, 0, 133,
+	135, 136, 137, 138, 0, 216, 127, 0, 160, 162,
+	163, 164, 0, 167, 169, 170, 171, 0, 22, 0,
+	48, 51, 0, 61, 0, 38, 42, 43, 39, 41,
+	44, 25, 26, 28, 0, 143, 144, 0, 0, 0,
+	0, 0, 0, 0, 0, 111, 112, 0, 0, 0,
+	0, 130, 131, 132, 0, 0, 116, 117, 118, 0,
+	158, 159, 165, 166, 0, 47, 49, 52, 53, 0,
+	58, 50, 0, 27, 0, 0, 0, 0, 0, 0,
+	0, 0, 108, 0, 0, 0, 88, 0, 96, 97,
+	98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
+	124, 128, 0, 0, 0, 0, 126, 0, 0, 0,
+	54, 0, 57, 50, 0, 62, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 80, 0, 110, 113, 114,
+	0, 87, 89, 90, 91, 0, 0, 129, 139, 0,
+	141, 0, 115, 119, 122, 120, 121, 123, 0, 222,
+	175, 55, 0, 63, 50, 0, 60, 29, 0, 0,
+	0, 0, 0, 0, 74, 0, 0, 0, 0, 85,
+	86, 0, 0, 0, 0, 0, 174, 56, 0, 0,
+	0, 59, 71, 0, 146, 72, 0, 146, 73, 0,
+	146, 0, 79, 109, 81, 0, 0, 0, 0, 0,
+	64, 50, 65, 50, 66, 0, 0, 0, 0, 0,
+	0, 0, 0, 0, 140, 142, 0, 0, 0, 0,
+	82, 0, 83, 0, 84, 78, 92, 0, 0, 0,
+	67, 68, 75, 76, 77, 0, 0, 0, 0, 94,
+	0, 172, 178, 93, 0, 0, 177, 179, 180, 0,
+	173, 176, 95,
+}
+var protoTok1 = [...]int{
+
+	1, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 78, 3, 76, 75, 74, 72, 3,
+	67, 68, 71, 65, 62, 66, 61, 59, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 55, 54,
+	64, 53, 63, 60, 77, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 69, 58, 70, 73, 3, 80, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+	3, 3, 3, 56, 3, 57, 79,
+}
+var protoTok2 = [...]int{
+
+	2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
+	12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
+	22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+	32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
+	42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+	52,
+}
+var protoTok3 = [...]int{
+	0,
+}
+
+var protoErrorMessages = [...]struct {
+	state int
+	token int
+	msg   string
+}{}
+
+//line yaccpar:1
+
+/*	parser for yacc output	*/
+
+var (
+	protoDebug        = 0
+	protoErrorVerbose = false
+)
+
+type protoLexer interface {
+	Lex(lval *protoSymType) int
+	Error(s string)
+}
+
+type protoParser interface {
+	Parse(protoLexer) int
+	Lookahead() int
+}
+
+type protoParserImpl struct {
+	lval  protoSymType
+	stack [protoInitialStackSize]protoSymType
+	char  int
+}
+
+func (p *protoParserImpl) Lookahead() int {
+	return p.char
+}
+
+func protoNewParser() protoParser {
+	return &protoParserImpl{}
+}
+
+const protoFlag = -1000
+
+func protoTokname(c int) string {
+	if c >= 1 && c-1 < len(protoToknames) {
+		if protoToknames[c-1] != "" {
+			return protoToknames[c-1]
+		}
+	}
+	return __yyfmt__.Sprintf("tok-%v", c)
+}
+
+func protoStatname(s int) string {
+	if s >= 0 && s < len(protoStatenames) {
+		if protoStatenames[s] != "" {
+			return protoStatenames[s]
+		}
+	}
+	return __yyfmt__.Sprintf("state-%v", s)
+}
+
+func protoErrorMessage(state, lookAhead int) string {
+	const TOKSTART = 4
+
+	if !protoErrorVerbose {
+		return "syntax error"
+	}
+
+	for _, e := range protoErrorMessages {
+		if e.state == state && e.token == lookAhead {
+			return "syntax error: " + e.msg
+		}
+	}
+
+	res := "syntax error: unexpected " + protoTokname(lookAhead)
+
+	// To match Bison, suggest at most four expected tokens.
+	expected := make([]int, 0, 4)
+
+	// Look for shiftable tokens.
+	base := protoPact[state]
+	for tok := TOKSTART; tok-1 < len(protoToknames); tok++ {
+		if n := base + tok; n >= 0 && n < protoLast && protoChk[protoAct[n]] == tok {
+			if len(expected) == cap(expected) {
+				return res
+			}
+			expected = append(expected, tok)
+		}
+	}
+
+	if protoDef[state] == -2 {
+		i := 0
+		for protoExca[i] != -1 || protoExca[i+1] != state {
+			i += 2
+		}
+
+		// Look for tokens that we accept or reduce.
+		for i += 2; protoExca[i] >= 0; i += 2 {
+			tok := protoExca[i]
+			if tok < TOKSTART || protoExca[i+1] == 0 {
+				continue
+			}
+			if len(expected) == cap(expected) {
+				return res
+			}
+			expected = append(expected, tok)
+		}
+
+		// If the default action is to accept or reduce, give up.
+		if protoExca[i+1] != 0 {
+			return res
+		}
+	}
+
+	for i, tok := range expected {
+		if i == 0 {
+			res += ", expecting "
+		} else {
+			res += " or "
+		}
+		res += protoTokname(tok)
+	}
+	return res
+}
+
+func protolex1(lex protoLexer, lval *protoSymType) (char, token int) {
+	token = 0
+	char = lex.Lex(lval)
+	if char <= 0 {
+		token = protoTok1[0]
+		goto out
+	}
+	if char < len(protoTok1) {
+		token = protoTok1[char]
+		goto out
+	}
+	if char >= protoPrivate {
+		if char < protoPrivate+len(protoTok2) {
+			token = protoTok2[char-protoPrivate]
+			goto out
+		}
+	}
+	for i := 0; i < len(protoTok3); i += 2 {
+		token = protoTok3[i+0]
+		if token == char {
+			token = protoTok3[i+1]
+			goto out
+		}
+	}
+
+out:
+	if token == 0 {
+		token = protoTok2[1] /* unknown char */
+	}
+	if protoDebug >= 3 {
+		__yyfmt__.Printf("lex %s(%d)\n", protoTokname(token), uint(char))
+	}
+	return char, token
+}
+
+func protoParse(protolex protoLexer) int {
+	return protoNewParser().Parse(protolex)
+}
+
+func (protorcvr *protoParserImpl) Parse(protolex protoLexer) int {
+	var proton int
+	var protoVAL protoSymType
+	var protoDollar []protoSymType
+	_ = protoDollar // silence set and not used
+	protoS := protorcvr.stack[:]
+
+	Nerrs := 0   /* number of errors */
+	Errflag := 0 /* error recovery flag */
+	protostate := 0
+	protorcvr.char = -1
+	prototoken := -1 // protorcvr.char translated into internal numbering
+	defer func() {
+		// Make sure we report no lookahead when not parsing.
+		protostate = -1
+		protorcvr.char = -1
+		prototoken = -1
+	}()
+	protop := -1
+	goto protostack
+
+ret0:
+	return 0
+
+ret1:
+	return 1
+
+protostack:
+	/* put a state and value onto the stack */
+	if protoDebug >= 4 {
+		__yyfmt__.Printf("char %v in %v\n", protoTokname(prototoken), protoStatname(protostate))
+	}
+
+	protop++
+	if protop >= len(protoS) {
+		nyys := make([]protoSymType, len(protoS)*2)
+		copy(nyys, protoS)
+		protoS = nyys
+	}
+	protoS[protop] = protoVAL
+	protoS[protop].yys = protostate
+
+protonewstate:
+	proton = protoPact[protostate]
+	if proton <= protoFlag {
+		goto protodefault /* simple state */
+	}
+	if protorcvr.char < 0 {
+		protorcvr.char, prototoken = protolex1(protolex, &protorcvr.lval)
+	}
+	proton += prototoken
+	if proton < 0 || proton >= protoLast {
+		goto protodefault
+	}
+	proton = protoAct[proton]
+	if protoChk[proton] == prototoken { /* valid shift */
+		protorcvr.char = -1
+		prototoken = -1
+		protoVAL = protorcvr.lval
+		protostate = proton
+		if Errflag > 0 {
+			Errflag--
+		}
+		goto protostack
+	}
+
+protodefault:
+	/* default state action */
+	proton = protoDef[protostate]
+	if proton == -2 {
+		if protorcvr.char < 0 {
+			protorcvr.char, prototoken = protolex1(protolex, &protorcvr.lval)
+		}
+
+		/* look through exception table */
+		xi := 0
+		for {
+			if protoExca[xi+0] == -1 && protoExca[xi+1] == protostate {
+				break
+			}
+			xi += 2
+		}
+		for xi += 2; ; xi += 2 {
+			proton = protoExca[xi+0]
+			if proton < 0 || proton == prototoken {
+				break
+			}
+		}
+		proton = protoExca[xi+1]
+		if proton < 0 {
+			goto ret0
+		}
+	}
+	if proton == 0 {
+		/* error ... attempt to resume parsing */
+		switch Errflag {
+		case 0: /* brand new error */
+			protolex.Error(protoErrorMessage(protostate, prototoken))
+			Nerrs++
+			if protoDebug >= 1 {
+				__yyfmt__.Printf("%s", protoStatname(protostate))
+				__yyfmt__.Printf(" saw %s\n", protoTokname(prototoken))
+			}
+			fallthrough
+
+		case 1, 2: /* incompletely recovered error ... try again */
+			Errflag = 3
+
+			/* find a state where "error" is a legal shift action */
+			for protop >= 0 {
+				proton = protoPact[protoS[protop].yys] + protoErrCode
+				if proton >= 0 && proton < protoLast {
+					protostate = protoAct[proton] /* simulate a shift of "error" */
+					if protoChk[protostate] == protoErrCode {
+						goto protostack
+					}
+				}
+
+				/* the current p has no shift on "error", pop stack */
+				if protoDebug >= 2 {
+					__yyfmt__.Printf("error recovery pops state %d\n", protoS[protop].yys)
+				}
+				protop--
+			}
+			/* there is no state on the stack with an error shift ... abort */
+			goto ret1
+
+		case 3: /* no shift yet; clobber input char */
+			if protoDebug >= 2 {
+				__yyfmt__.Printf("error recovery discards %s\n", protoTokname(prototoken))
+			}
+			if prototoken == protoEofCode {
+				goto ret1
+			}
+			protorcvr.char = -1
+			prototoken = -1
+			goto protonewstate /* try again in the same state */
+		}
+	}
+
+	/* reduction by production proton */
+	if protoDebug >= 2 {
+		__yyfmt__.Printf("reduce %v in:\n\t%v\n", proton, protoStatname(protostate))
+	}
+
+	protont := proton
+	protopt := protop
+	_ = protopt // guard against "declared and not used"
+
+	protop -= protoR2[proton]
+	// protop is now the index of $0. Perform the default action. Iff the
+	// reduced production is ε, $1 is possibly out of range.
+	if protop+1 >= len(protoS) {
+		nyys := make([]protoSymType, len(protoS)*2)
+		copy(nyys, protoS)
+		protoS = nyys
+	}
+	protoVAL = protoS[protop+1]
+
+	/* consult goto table to find next state */
+	proton = protoR1[proton]
+	protog := protoPgo[proton]
+	protoj := protog + protoS[protop].yys + 1
+
+	if protoj >= protoLast {
+		protostate = protoAct[protog]
+	} else {
+		protostate = protoAct[protoj]
+		if protoChk[protostate] != -proton {
+			protostate = protoAct[protog]
+		}
+	}
+	// dummy call; replaced with literal code
+	switch protont {
+
+	case 1:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:114
+		{
+			protoVAL.file = &fileNode{syntax: protoDollar[1].syn}
+			protoVAL.file.setRange(protoDollar[1].syn, protoDollar[1].syn)
+			protolex.(*protoLex).res = protoVAL.file
+		}
+	case 2:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:119
+		{
+			protoVAL.file = &fileNode{decls: protoDollar[1].fileDecls}
+			if len(protoDollar[1].fileDecls) > 0 {
+				protoVAL.file.setRange(protoDollar[1].fileDecls[0], protoDollar[1].fileDecls[len(protoDollar[1].fileDecls)-1])
+			}
+			protolex.(*protoLex).res = protoVAL.file
+		}
+	case 3:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:126
+		{
+			protoVAL.file = &fileNode{syntax: protoDollar[1].syn, decls: protoDollar[2].fileDecls}
+			var end node
+			if len(protoDollar[2].fileDecls) > 0 {
+				end = protoDollar[2].fileDecls[len(protoDollar[2].fileDecls)-1]
+			} else {
+				end = protoDollar[1].syn
+			}
+			protoVAL.file.setRange(protoDollar[1].syn, end)
+			protolex.(*protoLex).res = protoVAL.file
+		}
+	case 4:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:137
+		{
+		}
+	case 5:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:140
+		{
+			protoVAL.fileDecls = append(protoDollar[1].fileDecls, protoDollar[2].fileDecls...)
+		}
+	case 7:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:145
+		{
+			protoVAL.fileDecls = []*fileElement{{imp: protoDollar[1].imprt}}
+		}
+	case 8:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:148
+		{
+			protoVAL.fileDecls = []*fileElement{{pkg: protoDollar[1].pkg}}
+		}
+	case 9:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:151
+		{
+			protoVAL.fileDecls = []*fileElement{{option: protoDollar[1].opts[0]}}
+		}
+	case 10:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:154
+		{
+			protoVAL.fileDecls = []*fileElement{{message: protoDollar[1].msg}}
+		}
+	case 11:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:157
+		{
+			protoVAL.fileDecls = []*fileElement{{enum: protoDollar[1].en}}
+		}
+	case 12:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:160
+		{
+			protoVAL.fileDecls = []*fileElement{{extend: protoDollar[1].extend}}
+		}
+	case 13:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:163
+		{
+			protoVAL.fileDecls = []*fileElement{{service: protoDollar[1].svc}}
+		}
+	case 14:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:166
+		{
+			protoVAL.fileDecls = []*fileElement{{empty: protoDollar[1].b}}
+		}
+	case 15:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:170
+		{
+			if protoDollar[3].str.val != "proto2" && protoDollar[3].str.val != "proto3" {
+				lexError(protolex, protoDollar[3].str.start(), "syntax value must be 'proto2' or 'proto3'")
+			}
+			protoVAL.syn = &syntaxNode{syntax: protoDollar[3].str}
+			protoVAL.syn.setRange(protoDollar[1].id, protoDollar[4].b)
+		}
+	case 16:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:178
+		{
+			protoVAL.imprt = &importNode{name: protoDollar[2].str}
+			protoVAL.imprt.setRange(protoDollar[1].id, protoDollar[3].b)
+		}
+	case 17:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:182
+		{
+			protoVAL.imprt = &importNode{name: protoDollar[3].str, weak: true}
+			protoVAL.imprt.setRange(protoDollar[1].id, protoDollar[4].b)
+		}
+	case 18:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:186
+		{
+			protoVAL.imprt = &importNode{name: protoDollar[3].str, public: true}
+			protoVAL.imprt.setRange(protoDollar[1].id, protoDollar[4].b)
+		}
+	case 19:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:191
+		{
+			protoVAL.pkg = &packageNode{name: protoDollar[2].id}
+			protoVAL.pkg.setRange(protoDollar[1].id, protoDollar[3].b)
+		}
+	case 22:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:199
+		{
+			n := &optionNameNode{parts: protoDollar[2].optNm}
+			n.setRange(protoDollar[2].optNm[0], protoDollar[2].optNm[len(protoDollar[2].optNm)-1])
+			o := &optionNode{name: n, val: protoDollar[4].v}
+			o.setRange(protoDollar[1].id, protoDollar[5].b)
+			protoVAL.opts = []*optionNode{o}
+		}
+	case 23:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:207
+		{
+			protoVAL.optNm = toNameParts(protoDollar[1].id, 0)
+		}
+	case 24:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:210
+		{
+			p := &optionNamePartNode{text: protoDollar[2].id, isExtension: true}
+			p.setRange(protoDollar[1].b, protoDollar[3].b)
+			protoVAL.optNm = []*optionNamePartNode{p}
+		}
+	case 25:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:215
+		{
+			p := &optionNamePartNode{text: protoDollar[2].id, isExtension: true}
+			p.setRange(protoDollar[1].b, protoDollar[3].b)
+			ps := make([]*optionNamePartNode, 1, len(protoDollar[4].optNm)+1)
+			ps[0] = p
+			protoVAL.optNm = append(ps, protoDollar[4].optNm...)
+		}
+	case 27:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:224
+		{
+			protoVAL.optNm = append(protoDollar[1].optNm, protoDollar[2].optNm...)
+		}
+	case 28:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:228
+		{
+			protoVAL.optNm = toNameParts(protoDollar[1].id, 1 /* exclude leading dot */)
+		}
+	case 29:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:231
+		{
+			p := &optionNamePartNode{text: protoDollar[3].id, isExtension: true}
+			p.setRange(protoDollar[2].b, protoDollar[4].b)
+			protoVAL.optNm = []*optionNamePartNode{p}
+		}
+	case 32:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:240
+		{
+			protoVAL.v = protoDollar[1].str
+		}
+	case 33:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:243
+		{
+			protoVAL.v = protoDollar[1].ui
+		}
+	case 34:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:246
+		{
+			protoVAL.v = protoDollar[1].i
+		}
+	case 35:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:249
+		{
+			protoVAL.v = protoDollar[1].f
+		}
+	case 36:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:252
+		{
+			if protoDollar[1].id.val == "true" {
+				protoVAL.v = &boolLiteralNode{basicNode: protoDollar[1].id.basicNode, val: true}
+			} else if protoDollar[1].id.val == "false" {
+				protoVAL.v = &boolLiteralNode{basicNode: protoDollar[1].id.basicNode, val: false}
+			} else if protoDollar[1].id.val == "inf" {
+				f := &floatLiteralNode{val: math.Inf(1)}
+				f.setRange(protoDollar[1].id, protoDollar[1].id)
+				protoVAL.v = f
+			} else if protoDollar[1].id.val == "nan" {
+				f := &floatLiteralNode{val: math.NaN()}
+				f.setRange(protoDollar[1].id, protoDollar[1].id)
+				protoVAL.v = f
+			} else {
+				protoVAL.v = protoDollar[1].id
+			}
+		}
+	case 38:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:271
+		{
+			protoVAL.ui = protoDollar[2].ui
+		}
+	case 39:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:275
+		{
+			if protoDollar[2].ui.val > math.MaxInt64+1 {
+				lexError(protolex, protoDollar[2].ui.start(), fmt.Sprintf("numeric constant %d would underflow (allowed range is %d to %d)", protoDollar[2].ui.val, int64(math.MinInt64), int64(math.MaxInt64)))
+			}
+			protoVAL.i = &negativeIntLiteralNode{val: -int64(protoDollar[2].ui.val)}
+			protoVAL.i.setRange(protoDollar[1].b, protoDollar[2].ui)
+		}
+	case 41:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:284
+		{
+			protoVAL.f = &floatLiteralNode{val: -protoDollar[2].f.val}
+			protoVAL.f.setRange(protoDollar[1].b, protoDollar[2].f)
+		}
+	case 42:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:288
+		{
+			protoVAL.f = &floatLiteralNode{val: protoDollar[2].f.val}
+			protoVAL.f.setRange(protoDollar[1].b, protoDollar[2].f)
+		}
+	case 43:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:292
+		{
+			protoVAL.f = &floatLiteralNode{val: math.Inf(1)}
+			protoVAL.f.setRange(protoDollar[1].b, protoDollar[2].id)
+		}
+	case 44:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:296
+		{
+			protoVAL.f = &floatLiteralNode{val: math.Inf(-1)}
+			protoVAL.f.setRange(protoDollar[1].b, protoDollar[2].id)
+		}
+	case 46:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:302
+		{
+			protoVAL.str = &stringLiteralNode{val: protoDollar[1].str.val + protoDollar[2].str.val}
+			protoVAL.str.setRange(protoDollar[1].str, protoDollar[2].str)
+		}
+	case 47:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:307
+		{
+			a := &aggregateLiteralNode{elements: protoDollar[2].agg}
+			a.setRange(protoDollar[1].b, protoDollar[3].b)
+			protoVAL.v = a
+		}
+	case 49:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:314
+		{
+			protoVAL.agg = append(protoDollar[1].agg, protoDollar[2].agg...)
+		}
+	case 50:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:317
+		{
+			protoVAL.agg = nil
+		}
+	case 52:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:322
+		{
+			protoVAL.agg = protoDollar[1].agg
+		}
+	case 53:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:325
+		{
+			protoVAL.agg = protoDollar[1].agg
+		}
+	case 54:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:329
+		{
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: protoDollar[3].v}
+			a.setRange(protoDollar[1].aggName, protoDollar[3].v)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 55:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:334
+		{
+			s := &sliceLiteralNode{}
+			s.setRange(protoDollar[3].b, protoDollar[4].b)
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: s}
+			a.setRange(protoDollar[1].aggName, protoDollar[4].b)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 56:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:341
+		{
+			s := &sliceLiteralNode{elements: protoDollar[4].sl}
+			s.setRange(protoDollar[3].b, protoDollar[5].b)
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: s}
+			a.setRange(protoDollar[1].aggName, protoDollar[5].b)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 57:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:348
+		{
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: protoDollar[3].v}
+			a.setRange(protoDollar[1].aggName, protoDollar[3].v)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 58:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:353
+		{
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: protoDollar[2].v}
+			a.setRange(protoDollar[1].aggName, protoDollar[2].v)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 59:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:358
+		{
+			s := &aggregateLiteralNode{elements: protoDollar[4].agg}
+			s.setRange(protoDollar[3].b, protoDollar[5].b)
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: s}
+			a.setRange(protoDollar[1].aggName, protoDollar[5].b)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 60:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:365
+		{
+			s := &aggregateLiteralNode{elements: protoDollar[3].agg}
+			s.setRange(protoDollar[2].b, protoDollar[4].b)
+			a := &aggregateEntryNode{name: protoDollar[1].aggName, val: s}
+			a.setRange(protoDollar[1].aggName, protoDollar[4].b)
+			protoVAL.agg = []*aggregateEntryNode{a}
+		}
+	case 61:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:373
+		{
+			protoVAL.aggName = &aggregateNameNode{name: protoDollar[1].id}
+			protoVAL.aggName.setRange(protoDollar[1].id, protoDollar[1].id)
+		}
+	case 62:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:377
+		{
+			protoVAL.aggName = &aggregateNameNode{name: protoDollar[2].id, isExtension: true}
+			protoVAL.aggName.setRange(protoDollar[1].b, protoDollar[3].b)
+		}
+	case 63:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:382
+		{
+			protoVAL.sl = []valueNode{protoDollar[1].v}
+		}
+	case 64:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:385
+		{
+			protoVAL.sl = append(protoDollar[1].sl, protoDollar[3].v)
+		}
+	case 65:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:388
+		{
+			protoVAL.sl = append(protoDollar[1].sl, protoDollar[3].v)
+		}
+	case 66:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:391
+		{
+			s := &aggregateLiteralNode{elements: protoDollar[2].agg}
+			s.setRange(protoDollar[1].b, protoDollar[3].b)
+			protoVAL.sl = []valueNode{s}
+		}
+	case 67:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:396
+		{
+			s := &aggregateLiteralNode{elements: protoDollar[4].agg}
+			s.setRange(protoDollar[3].b, protoDollar[5].b)
+			protoVAL.sl = append(protoDollar[1].sl, s)
+		}
+	case 68:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:401
+		{
+			s := &aggregateLiteralNode{elements: protoDollar[4].agg}
+			s.setRange(protoDollar[3].b, protoDollar[5].b)
+			protoVAL.sl = append(protoDollar[1].sl, s)
+		}
+	case 71:
+		protoDollar = protoS[protopt-6 : protopt+1]
+//line proto.y:410
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode, required: true}
+			protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].id, name: protoDollar[3].id, tag: protoDollar[5].ui}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[6].b)
+		}
+	case 72:
+		protoDollar = protoS[protopt-6 : protopt+1]
+//line proto.y:416
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode}
+			protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].id, name: protoDollar[3].id, tag: protoDollar[5].ui}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[6].b)
+		}
+	case 73:
+		protoDollar = protoS[protopt-6 : protopt+1]
+//line proto.y:422
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode, repeated: true}
+			protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].id, name: protoDollar[3].id, tag: protoDollar[5].ui}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[6].b)
+		}
+	case 74:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:428
+		{
+			checkTag(protolex, protoDollar[4].ui.start(), protoDollar[4].ui.val)
+			protoVAL.fld = &fieldNode{fldType: protoDollar[1].id, name: protoDollar[2].id, tag: protoDollar[4].ui}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 75:
+		protoDollar = protoS[protopt-9 : protopt+1]
+//line proto.y:433
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode, required: true}
+			protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].id, name: protoDollar[3].id, tag: protoDollar[5].ui, options: protoDollar[7].opts}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[9].b)
+		}
+	case 76:
+		protoDollar = protoS[protopt-9 : protopt+1]
+//line proto.y:439
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode}
+			protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].id, name: protoDollar[3].id, tag: protoDollar[5].ui, options: protoDollar[7].opts}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[9].b)
+		}
+	case 77:
+		protoDollar = protoS[protopt-9 : protopt+1]
+//line proto.y:445
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode, repeated: true}
+			protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].id, name: protoDollar[3].id, tag: protoDollar[5].ui, options: protoDollar[7].opts}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[9].b)
+		}
+	case 78:
+		protoDollar = protoS[protopt-8 : protopt+1]
+//line proto.y:451
+		{
+			checkTag(protolex, protoDollar[4].ui.start(), protoDollar[4].ui.val)
+			protoVAL.fld = &fieldNode{fldType: protoDollar[1].id, name: protoDollar[2].id, tag: protoDollar[4].ui, options: protoDollar[6].opts}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[8].b)
+		}
+	case 79:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:457
+		{
+			protoVAL.opts = append(protoDollar[1].opts, protoDollar[3].opts...)
+		}
+	case 81:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:462
+		{
+			n := &optionNameNode{parts: protoDollar[1].optNm}
+			n.setRange(protoDollar[1].optNm[0], protoDollar[1].optNm[len(protoDollar[1].optNm)-1])
+			o := &optionNode{name: n, val: protoDollar[3].v}
+			o.setRange(protoDollar[1].optNm[0], protoDollar[3].v)
+			protoVAL.opts = []*optionNode{o}
+		}
+	case 82:
+		protoDollar = protoS[protopt-8 : protopt+1]
+//line proto.y:470
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			if !unicode.IsUpper(rune(protoDollar[3].id.val[0])) {
+				lexError(protolex, protoDollar[3].id.start(), fmt.Sprintf("group %s should have a name that starts with a capital letter", protoDollar[3].id.val))
+			}
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode, required: true}
+			protoVAL.grp = &groupNode{groupKeyword: protoDollar[2].id, label: lbl, name: protoDollar[3].id, tag: protoDollar[5].ui, decls: protoDollar[7].msgDecls}
+			protoVAL.grp.setRange(protoDollar[1].id, protoDollar[8].b)
+		}
+	case 83:
+		protoDollar = protoS[protopt-8 : protopt+1]
+//line proto.y:479
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			if !unicode.IsUpper(rune(protoDollar[3].id.val[0])) {
+				lexError(protolex, protoDollar[3].id.start(), fmt.Sprintf("group %s should have a name that starts with a capital letter", protoDollar[3].id.val))
+			}
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode}
+			protoVAL.grp = &groupNode{groupKeyword: protoDollar[2].id, label: lbl, name: protoDollar[3].id, tag: protoDollar[5].ui, decls: protoDollar[7].msgDecls}
+			protoVAL.grp.setRange(protoDollar[1].id, protoDollar[8].b)
+		}
+	case 84:
+		protoDollar = protoS[protopt-8 : protopt+1]
+//line proto.y:488
+		{
+			checkTag(protolex, protoDollar[5].ui.start(), protoDollar[5].ui.val)
+			if !unicode.IsUpper(rune(protoDollar[3].id.val[0])) {
+				lexError(protolex, protoDollar[3].id.start(), fmt.Sprintf("group %s should have a name that starts with a capital letter", protoDollar[3].id.val))
+			}
+			lbl := &labelNode{basicNode: protoDollar[1].id.basicNode, repeated: true}
+			protoVAL.grp = &groupNode{groupKeyword: protoDollar[2].id, label: lbl, name: protoDollar[3].id, tag: protoDollar[5].ui, decls: protoDollar[7].msgDecls}
+			protoVAL.grp.setRange(protoDollar[1].id, protoDollar[8].b)
+		}
+	case 85:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:498
+		{
+			c := 0
+			for _, el := range protoDollar[4].ooDecls {
+				if el.field != nil {
+					c++
+				}
+			}
+			if c == 0 {
+				lexError(protolex, protoDollar[1].id.start(), "oneof must contain at least one field")
+			}
+			protoVAL.oo = &oneOfNode{name: protoDollar[2].id, decls: protoDollar[4].ooDecls}
+			protoVAL.oo.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 86:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:512
+		{
+			protoVAL.ooDecls = append(protoDollar[1].ooDecls, protoDollar[2].ooDecls...)
+		}
+	case 88:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:516
+		{
+			protoVAL.ooDecls = nil
+		}
+	case 89:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:520
+		{
+			protoVAL.ooDecls = []*oneOfElement{{option: protoDollar[1].opts[0]}}
+		}
+	case 90:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:523
+		{
+			protoVAL.ooDecls = []*oneOfElement{{field: protoDollar[1].fld}}
+		}
+	case 91:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:526
+		{
+			protoVAL.ooDecls = []*oneOfElement{{empty: protoDollar[1].b}}
+		}
+	case 92:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:530
+		{
+			checkTag(protolex, protoDollar[4].ui.start(), protoDollar[4].ui.val)
+			protoVAL.fld = &fieldNode{fldType: protoDollar[1].id, name: protoDollar[2].id, tag: protoDollar[4].ui}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 93:
+		protoDollar = protoS[protopt-8 : protopt+1]
+//line proto.y:535
+		{
+			checkTag(protolex, protoDollar[4].ui.start(), protoDollar[4].ui.val)
+			protoVAL.fld = &fieldNode{fldType: protoDollar[1].id, name: protoDollar[2].id, tag: protoDollar[4].ui, options: protoDollar[6].opts}
+			protoVAL.fld.setRange(protoDollar[1].id, protoDollar[8].b)
+		}
+	case 94:
+		protoDollar = protoS[protopt-10 : protopt+1]
+//line proto.y:541
+		{
+			checkTag(protolex, protoDollar[9].ui.start(), protoDollar[9].ui.val)
+			protoVAL.mapFld = &mapFieldNode{mapKeyword: protoDollar[1].id, keyType: protoDollar[3].id, valueType: protoDollar[5].id, name: protoDollar[7].id, tag: protoDollar[9].ui}
+			protoVAL.mapFld.setRange(protoDollar[1].id, protoDollar[10].b)
+		}
+	case 95:
+		protoDollar = protoS[protopt-13 : protopt+1]
+//line proto.y:546
+		{
+			checkTag(protolex, protoDollar[9].ui.start(), protoDollar[9].ui.val)
+			protoVAL.mapFld = &mapFieldNode{mapKeyword: protoDollar[1].id, keyType: protoDollar[3].id, valueType: protoDollar[5].id, name: protoDollar[7].id, tag: protoDollar[9].ui, options: protoDollar[11].opts}
+			protoVAL.mapFld.setRange(protoDollar[1].id, protoDollar[13].b)
+		}
+	case 108:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:565
+		{
+			protoVAL.ext = &extensionRangeNode{ranges: protoDollar[2].rngs}
+			protoVAL.ext.setRange(protoDollar[1].id, protoDollar[3].b)
+		}
+	case 109:
+		protoDollar = protoS[protopt-6 : protopt+1]
+//line proto.y:569
+		{
+			protoVAL.ext = &extensionRangeNode{ranges: protoDollar[2].rngs, options: protoDollar[4].opts}
+			protoVAL.ext.setRange(protoDollar[1].id, protoDollar[6].b)
+		}
+	case 110:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:574
+		{
+			protoVAL.rngs = append(protoDollar[1].rngs, protoDollar[3].rngs...)
+		}
+	case 112:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:579
+		{
+			if protoDollar[1].ui.val > internal.MaxTag {
+				lexError(protolex, protoDollar[1].ui.start(), fmt.Sprintf("range includes out-of-range tag: %d (should be between 0 and %d)", protoDollar[1].ui.val, internal.MaxTag))
+			}
+			r := &rangeNode{stNode: protoDollar[1].ui, enNode: protoDollar[1].ui, st: int32(protoDollar[1].ui.val), en: int32(protoDollar[1].ui.val)}
+			r.setRange(protoDollar[1].ui, protoDollar[1].ui)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 113:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:587
+		{
+			if protoDollar[1].ui.val > internal.MaxTag {
+				lexError(protolex, protoDollar[1].ui.start(), fmt.Sprintf("range start is out-of-range tag: %d (should be between 0 and %d)", protoDollar[1].ui.val, internal.MaxTag))
+			}
+			if protoDollar[3].ui.val > internal.MaxTag {
+				lexError(protolex, protoDollar[3].ui.start(), fmt.Sprintf("range end is out-of-range tag: %d (should be between 0 and %d)", protoDollar[3].ui.val, internal.MaxTag))
+			}
+			if protoDollar[1].ui.val > protoDollar[3].ui.val {
+				lexError(protolex, protoDollar[1].ui.start(), fmt.Sprintf("range, %d to %d, is invalid: start must be <= end", protoDollar[1].ui.val, protoDollar[3].ui.val))
+			}
+			r := &rangeNode{stNode: protoDollar[1].ui, enNode: protoDollar[3].ui, st: int32(protoDollar[1].ui.val), en: int32(protoDollar[3].ui.val)}
+			r.setRange(protoDollar[1].ui, protoDollar[3].ui)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 114:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:601
+		{
+			if protoDollar[1].ui.val > internal.MaxTag {
+				lexError(protolex, protoDollar[1].ui.start(), fmt.Sprintf("range start is out-of-range tag: %d (should be between 0 and %d)", protoDollar[1].ui.val, internal.MaxTag))
+			}
+			r := &rangeNode{stNode: protoDollar[1].ui, enNode: protoDollar[3].id, st: int32(protoDollar[1].ui.val), en: internal.MaxTag}
+			r.setRange(protoDollar[1].ui, protoDollar[3].id)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 115:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:610
+		{
+			protoVAL.rngs = append(protoDollar[1].rngs, protoDollar[3].rngs...)
+		}
+	case 117:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:615
+		{
+			checkUint64InInt32Range(protolex, protoDollar[1].ui.start(), protoDollar[1].ui.val)
+			r := &rangeNode{stNode: protoDollar[1].ui, enNode: protoDollar[1].ui, st: int32(protoDollar[1].ui.val), en: int32(protoDollar[1].ui.val)}
+			r.setRange(protoDollar[1].ui, protoDollar[1].ui)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 118:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:621
+		{
+			checkInt64InInt32Range(protolex, protoDollar[1].i.start(), protoDollar[1].i.val)
+			r := &rangeNode{stNode: protoDollar[1].i, enNode: protoDollar[1].i, st: int32(protoDollar[1].i.val), en: int32(protoDollar[1].i.val)}
+			r.setRange(protoDollar[1].i, protoDollar[1].i)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 119:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:627
+		{
+			checkUint64InInt32Range(protolex, protoDollar[1].ui.start(), protoDollar[1].ui.val)
+			checkUint64InInt32Range(protolex, protoDollar[3].ui.start(), protoDollar[3].ui.val)
+			if protoDollar[1].ui.val > protoDollar[3].ui.val {
+				lexError(protolex, protoDollar[1].ui.start(), fmt.Sprintf("range, %d to %d, is invalid: start must be <= end", protoDollar[1].ui.val, protoDollar[3].ui.val))
+			}
+			r := &rangeNode{stNode: protoDollar[1].ui, enNode: protoDollar[3].ui, st: int32(protoDollar[1].ui.val), en: int32(protoDollar[3].ui.val)}
+			r.setRange(protoDollar[1].ui, protoDollar[3].ui)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 120:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:637
+		{
+			checkInt64InInt32Range(protolex, protoDollar[1].i.start(), protoDollar[1].i.val)
+			checkInt64InInt32Range(protolex, protoDollar[3].i.start(), protoDollar[3].i.val)
+			if protoDollar[1].i.val > protoDollar[3].i.val {
+				lexError(protolex, protoDollar[1].i.start(), fmt.Sprintf("range, %d to %d, is invalid: start must be <= end", protoDollar[1].i.val, protoDollar[3].i.val))
+			}
+			r := &rangeNode{stNode: protoDollar[1].i, enNode: protoDollar[3].i, st: int32(protoDollar[1].i.val), en: int32(protoDollar[3].i.val)}
+			r.setRange(protoDollar[1].i, protoDollar[3].i)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 121:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:647
+		{
+			checkInt64InInt32Range(protolex, protoDollar[1].i.start(), protoDollar[1].i.val)
+			checkUint64InInt32Range(protolex, protoDollar[3].ui.start(), protoDollar[3].ui.val)
+			r := &rangeNode{stNode: protoDollar[1].i, enNode: protoDollar[3].ui, st: int32(protoDollar[1].i.val), en: int32(protoDollar[3].ui.val)}
+			r.setRange(protoDollar[1].i, protoDollar[3].ui)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 122:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:654
+		{
+			checkUint64InInt32Range(protolex, protoDollar[1].ui.start(), protoDollar[1].ui.val)
+			r := &rangeNode{stNode: protoDollar[1].ui, enNode: protoDollar[3].id, st: int32(protoDollar[1].ui.val), en: math.MaxInt32}
+			r.setRange(protoDollar[1].ui, protoDollar[3].id)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 123:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:660
+		{
+			checkInt64InInt32Range(protolex, protoDollar[1].i.start(), protoDollar[1].i.val)
+			r := &rangeNode{stNode: protoDollar[1].i, enNode: protoDollar[3].id, st: int32(protoDollar[1].i.val), en: math.MaxInt32}
+			r.setRange(protoDollar[1].i, protoDollar[3].id)
+			protoVAL.rngs = []*rangeNode{r}
+		}
+	case 124:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:667
+		{
+			protoVAL.resvd = &reservedNode{ranges: protoDollar[2].rngs}
+			protoVAL.resvd.setRange(protoDollar[1].id, protoDollar[3].b)
+		}
+	case 126:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:673
+		{
+			protoVAL.resvd = &reservedNode{ranges: protoDollar[2].rngs}
+			protoVAL.resvd.setRange(protoDollar[1].id, protoDollar[3].b)
+		}
+	case 128:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:679
+		{
+			rsvd := map[string]struct{}{}
+			for _, n := range protoDollar[2].names {
+				if _, ok := rsvd[n.val]; ok {
+					lexError(protolex, n.start(), fmt.Sprintf("name %q is reserved multiple times", n.val))
+					break
+				}
+				rsvd[n.val] = struct{}{}
+			}
+			protoVAL.resvd = &reservedNode{names: protoDollar[2].names}
+			protoVAL.resvd.setRange(protoDollar[1].id, protoDollar[3].b)
+		}
+	case 129:
+		protoDollar = protoS[protopt-3 : protopt+1]
+//line proto.y:692
+		{
+			protoVAL.names = append(protoDollar[1].names, protoDollar[3].str)
+		}
+	case 130:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:695
+		{
+			protoVAL.names = []*stringLiteralNode{protoDollar[1].str}
+		}
+	case 131:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:699
+		{
+			c := 0
+			for _, el := range protoDollar[4].enDecls {
+				if el.value != nil {
+					c++
+				}
+			}
+			if c == 0 {
+				lexError(protolex, protoDollar[1].id.start(), "enums must define at least one value")
+			}
+			protoVAL.en = &enumNode{name: protoDollar[2].id, decls: protoDollar[4].enDecls}
+			protoVAL.en.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 132:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:713
+		{
+			protoVAL.enDecls = append(protoDollar[1].enDecls, protoDollar[2].enDecls...)
+		}
+	case 134:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:717
+		{
+			protoVAL.enDecls = nil
+		}
+	case 135:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:721
+		{
+			protoVAL.enDecls = []*enumElement{{option: protoDollar[1].opts[0]}}
+		}
+	case 136:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:724
+		{
+			protoVAL.enDecls = []*enumElement{{value: protoDollar[1].env}}
+		}
+	case 137:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:727
+		{
+			protoVAL.enDecls = []*enumElement{{reserved: protoDollar[1].resvd}}
+		}
+	case 138:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:730
+		{
+			protoVAL.enDecls = []*enumElement{{empty: protoDollar[1].b}}
+		}
+	case 139:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:734
+		{
+			checkUint64InInt32Range(protolex, protoDollar[3].ui.start(), protoDollar[3].ui.val)
+			protoVAL.env = &enumValueNode{name: protoDollar[1].id, numberP: protoDollar[3].ui}
+			protoVAL.env.setRange(protoDollar[1].id, protoDollar[4].b)
+		}
+	case 140:
+		protoDollar = protoS[protopt-7 : protopt+1]
+//line proto.y:739
+		{
+			checkUint64InInt32Range(protolex, protoDollar[3].ui.start(), protoDollar[3].ui.val)
+			protoVAL.env = &enumValueNode{name: protoDollar[1].id, numberP: protoDollar[3].ui, options: protoDollar[5].opts}
+			protoVAL.env.setRange(protoDollar[1].id, protoDollar[7].b)
+		}
+	case 141:
+		protoDollar = protoS[protopt-4 : protopt+1]
+//line proto.y:744
+		{
+			checkInt64InInt32Range(protolex, protoDollar[3].i.start(), protoDollar[3].i.val)
+			protoVAL.env = &enumValueNode{name: protoDollar[1].id, numberN: protoDollar[3].i}
+			protoVAL.env.setRange(protoDollar[1].id, protoDollar[4].b)
+		}
+	case 142:
+		protoDollar = protoS[protopt-7 : protopt+1]
+//line proto.y:749
+		{
+			checkInt64InInt32Range(protolex, protoDollar[3].i.start(), protoDollar[3].i.val)
+			protoVAL.env = &enumValueNode{name: protoDollar[1].id, numberN: protoDollar[3].i, options: protoDollar[5].opts}
+			protoVAL.env.setRange(protoDollar[1].id, protoDollar[7].b)
+		}
+	case 143:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:755
+		{
+			protoVAL.msg = &messageNode{name: protoDollar[2].id, decls: protoDollar[4].msgDecls}
+			protoVAL.msg.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 144:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:760
+		{
+			protoVAL.msgDecls = append(protoDollar[1].msgDecls, protoDollar[2].msgDecls...)
+		}
+	case 146:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:764
+		{
+			protoVAL.msgDecls = nil
+		}
+	case 147:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:768
+		{
+			protoVAL.msgDecls = []*messageElement{{field: protoDollar[1].fld}}
+		}
+	case 148:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:771
+		{
+			protoVAL.msgDecls = []*messageElement{{enum: protoDollar[1].en}}
+		}
+	case 149:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:774
+		{
+			protoVAL.msgDecls = []*messageElement{{nested: protoDollar[1].msg}}
+		}
+	case 150:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:777
+		{
+			protoVAL.msgDecls = []*messageElement{{extend: protoDollar[1].extend}}
+		}
+	case 151:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:780
+		{
+			protoVAL.msgDecls = []*messageElement{{extensionRange: protoDollar[1].ext}}
+		}
+	case 152:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:783
+		{
+			protoVAL.msgDecls = []*messageElement{{group: protoDollar[1].grp}}
+		}
+	case 153:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:786
+		{
+			protoVAL.msgDecls = []*messageElement{{option: protoDollar[1].opts[0]}}
+		}
+	case 154:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:789
+		{
+			protoVAL.msgDecls = []*messageElement{{oneOf: protoDollar[1].oo}}
+		}
+	case 155:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:792
+		{
+			protoVAL.msgDecls = []*messageElement{{mapField: protoDollar[1].mapFld}}
+		}
+	case 156:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:795
+		{
+			protoVAL.msgDecls = []*messageElement{{reserved: protoDollar[1].resvd}}
+		}
+	case 157:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:798
+		{
+			protoVAL.msgDecls = []*messageElement{{empty: protoDollar[1].b}}
+		}
+	case 158:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:802
+		{
+			c := 0
+			for _, el := range protoDollar[4].extDecls {
+				if el.field != nil || el.group != nil {
+					c++
+				}
+			}
+			if c == 0 {
+				lexError(protolex, protoDollar[1].id.start(), "extend sections must define at least one extension")
+			}
+			protoVAL.extend = &extendNode{extendee: protoDollar[2].id, decls: protoDollar[4].extDecls}
+			protoVAL.extend.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 159:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:816
+		{
+			protoVAL.extDecls = append(protoDollar[1].extDecls, protoDollar[2].extDecls...)
+		}
+	case 161:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:820
+		{
+			protoVAL.extDecls = nil
+		}
+	case 162:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:824
+		{
+			protoVAL.extDecls = []*extendElement{{field: protoDollar[1].fld}}
+		}
+	case 163:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:827
+		{
+			protoVAL.extDecls = []*extendElement{{group: protoDollar[1].grp}}
+		}
+	case 164:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:830
+		{
+			protoVAL.extDecls = []*extendElement{{empty: protoDollar[1].b}}
+		}
+	case 165:
+		protoDollar = protoS[protopt-5 : protopt+1]
+//line proto.y:834
+		{
+			protoVAL.svc = &serviceNode{name: protoDollar[2].id, decls: protoDollar[4].svcDecls}
+			protoVAL.svc.setRange(protoDollar[1].id, protoDollar[5].b)
+		}
+	case 166:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:839
+		{
+			protoVAL.svcDecls = append(protoDollar[1].svcDecls, protoDollar[2].svcDecls...)
+		}
+	case 168:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:843
+		{
+			protoVAL.svcDecls = nil
+		}
+	case 169:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:850
+		{
+			protoVAL.svcDecls = []*serviceElement{{option: protoDollar[1].opts[0]}}
+		}
+	case 170:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:853
+		{
+			protoVAL.svcDecls = []*serviceElement{{rpc: protoDollar[1].mtd}}
+		}
+	case 171:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:856
+		{
+			protoVAL.svcDecls = []*serviceElement{{empty: protoDollar[1].b}}
+		}
+	case 172:
+		protoDollar = protoS[protopt-10 : protopt+1]
+//line proto.y:860
+		{
+			protoVAL.mtd = &methodNode{name: protoDollar[2].id, input: protoDollar[4].rpcType, output: protoDollar[8].rpcType}
+			protoVAL.mtd.setRange(protoDollar[1].id, protoDollar[10].b)
+		}
+	case 173:
+		protoDollar = protoS[protopt-12 : protopt+1]
+//line proto.y:864
+		{
+			protoVAL.mtd = &methodNode{name: protoDollar[2].id, input: protoDollar[4].rpcType, output: protoDollar[8].rpcType, options: protoDollar[11].opts}
+			protoVAL.mtd.setRange(protoDollar[1].id, protoDollar[12].b)
+		}
+	case 174:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:869
+		{
+			protoVAL.rpcType = &rpcTypeNode{msgType: protoDollar[2].id, streamKeyword: protoDollar[1].id}
+			protoVAL.rpcType.setRange(protoDollar[1].id, protoDollar[2].id)
+		}
+	case 175:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:873
+		{
+			protoVAL.rpcType = &rpcTypeNode{msgType: protoDollar[1].id}
+			protoVAL.rpcType.setRange(protoDollar[1].id, protoDollar[1].id)
+		}
+	case 176:
+		protoDollar = protoS[protopt-2 : protopt+1]
+//line proto.y:878
+		{
+			protoVAL.opts = append(protoDollar[1].opts, protoDollar[2].opts...)
+		}
+	case 178:
+		protoDollar = protoS[protopt-0 : protopt+1]
+//line proto.y:882
+		{
+			protoVAL.opts = []*optionNode{}
+		}
+	case 179:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:886
+		{
+			protoVAL.opts = protoDollar[1].opts
+		}
+	case 180:
+		protoDollar = protoS[protopt-1 : protopt+1]
+//line proto.y:889
+		{
+			protoVAL.opts = []*optionNode{}
+		}
+	}
+	goto protostack /* stack new state and value */
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/source_code_info.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/source_code_info.go
new file mode 100644
index 0000000..d0a61c2
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/source_code_info.go
@@ -0,0 +1,612 @@
+package protoparse
+
+import (
+	"bytes"
+	"reflect"
+	"sort"
+	"strings"
+
+	"github.com/golang/protobuf/proto"
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+
+	"github.com/jhump/protoreflect/desc/internal"
+)
+
+// generateSourceCodeInfo builds the SourceCodeInfo message for the parsed
+// file: one location (path + span + attributed comments) per descriptor
+// element. It returns nil when the parse result has no AST nodes, which is
+// the case for files loaded from compiled well-known descriptors instead of
+// from source.
+func (r *parseResult) generateSourceCodeInfo() *dpb.SourceCodeInfo {
+	if r.nodes == nil {
+		// skip files that do not have AST info (these will be files
+		// that came from well-known descriptors, instead of from source)
+		return nil
+	}
+
+	sci := sourceCodeInfo{commentsUsed: map[*comment]struct{}{}}
+	path := make([]int32, 0, 10)
+
+	// NOTE: the append(path, ...) results below may share path's backing
+	// array across calls; that is safe because newLoc copies the path.
+	fn := r.getFileNode(r.fd).(*fileNode)
+	if fn.syntax != nil {
+		sci.newLoc(fn.syntax, append(path, internal.File_syntaxTag))
+	}
+	if fn.pkg != nil {
+		sci.newLoc(fn.pkg, append(path, internal.File_packageTag))
+	}
+	for i, imp := range fn.imports {
+		sci.newLoc(imp, append(path, internal.File_dependencyTag, int32(i)))
+	}
+
+	// file options
+	r.generateSourceCodeInfoForOptions(&sci, fn.decls, func(n interface{}) *optionNode {
+		return n.(*fileElement).option
+	}, r.fd.Options.GetUninterpretedOption(), append(path, internal.File_optionsTag))
+
+	// message types
+	for i, msg := range r.fd.GetMessageType() {
+		r.generateSourceCodeInfoForMessage(&sci, msg, append(path, internal.File_messagesTag, int32(i)))
+	}
+
+	// enum types
+	for i, enum := range r.fd.GetEnumType() {
+		r.generateSourceCodeInfoForEnum(&sci, enum, append(path, internal.File_enumsTag, int32(i)))
+	}
+
+	// extension fields
+	for i, ext := range r.fd.GetExtension() {
+		r.generateSourceCodeInfoForField(&sci, ext, append(path, internal.File_extensionsTag, int32(i)))
+	}
+
+	// services and methods
+	for i, svc := range r.fd.GetService() {
+		n := r.getServiceNode(svc).(*serviceNode)
+		svcPath := append(path, internal.File_servicesTag, int32(i))
+		sci.newLoc(n, svcPath)
+		sci.newLoc(n.name, append(svcPath, internal.Service_nameTag))
+
+		// service options
+		r.generateSourceCodeInfoForOptions(&sci, n.decls, func(n interface{}) *optionNode {
+			return n.(*serviceElement).option
+		}, svc.Options.GetUninterpretedOption(), append(svcPath, internal.Service_optionsTag))
+
+		// methods
+		for j, mtd := range svc.GetMethod() {
+			mn := r.getMethodNode(mtd).(*methodNode)
+			mtdPath := append(svcPath, internal.Service_methodsTag, int32(j))
+			sci.newLoc(mn, mtdPath)
+			sci.newLoc(mn.name, append(mtdPath, internal.Method_nameTag))
+
+			// request/response types, plus optional "stream" keywords
+			sci.newLoc(mn.input.msgType, append(mtdPath, internal.Method_inputTag))
+			if mn.input.streamKeyword != nil {
+				sci.newLoc(mn.input.streamKeyword, append(mtdPath, internal.Method_inputStreamTag))
+			}
+			sci.newLoc(mn.output.msgType, append(mtdPath, internal.Method_outputTag))
+			if mn.output.streamKeyword != nil {
+				sci.newLoc(mn.output.streamKeyword, append(mtdPath, internal.Method_outputStreamTag))
+			}
+
+			// method options
+			r.generateSourceCodeInfoForOptions(&sci, mn.options, func(n interface{}) *optionNode {
+				return n.(*optionNode)
+			}, mtd.Options.GetUninterpretedOption(), append(mtdPath, internal.Method_optionsTag))
+		}
+	}
+	return &dpb.SourceCodeInfo{Location: sci.generateLocs()}
+}
+
+// generateSourceCodeInfoForOptions records locations for an element's
+// options. elements must be a slice (accessed via reflection because each
+// caller passes a different concrete slice type); extractor pulls the
+// *optionNode out of each slice element (returning nil for non-option
+// elements). Interpreted options get their path from r.interpretedOptions;
+// uninterp lists options that were left uninterpreted, which get
+// uninterpreted_option paths instead. path is the prefix for the element's
+// options field.
+func (r *parseResult) generateSourceCodeInfoForOptions(sci *sourceCodeInfo, elements interface{}, extractor func(interface{}) *optionNode, uninterp []*dpb.UninterpretedOption, path []int32) {
+	// Known options are option node elements that have a corresponding
+	// path in r.interpretedOptions. We'll do those first.
+	rv := reflect.ValueOf(elements)
+	for i := 0; i < rv.Len(); i++ {
+		on := extractor(rv.Index(i).Interface())
+		if on == nil {
+			continue
+		}
+		optPath := r.interpretedOptions[on]
+		if len(optPath) > 0 {
+			p := path
+			if optPath[0] == -1 {
+				// used by "default" and "json_name" field pseudo-options
+				// to attribute path to parent element (since those are
+				// stored directly on the descriptor, not its options)
+				// (copy fills only len(p) elements, dropping path's
+				// final "options" tag)
+				p = make([]int32, len(path)-1)
+				copy(p, path)
+				optPath = optPath[1:]
+			}
+			sci.newLoc(on, append(p, optPath...))
+		}
+	}
+
+	// Now uninterpreted options
+	for i, uo := range uninterp {
+		optPath := append(path, internal.UninterpretedOptionsTag, int32(i))
+		on := r.getOptionNode(uo).(*optionNode)
+		sci.newLoc(on, optPath)
+
+		// exactly one value field should be set; record a location for it
+		var valTag int32
+		switch {
+		case uo.IdentifierValue != nil:
+			valTag = internal.Uninterpreted_identTag
+		case uo.PositiveIntValue != nil:
+			valTag = internal.Uninterpreted_posIntTag
+		case uo.NegativeIntValue != nil:
+			valTag = internal.Uninterpreted_negIntTag
+		case uo.DoubleValue != nil:
+			valTag = internal.Uninterpreted_doubleTag
+		case uo.StringValue != nil:
+			valTag = internal.Uninterpreted_stringTag
+		case uo.AggregateValue != nil:
+			valTag = internal.Uninterpreted_aggregateTag
+		}
+		if valTag != 0 {
+			sci.newLoc(on.val, append(optPath, valTag))
+		}
+
+		// each name component of the uninterpreted option also gets a location
+		for j, n := range uo.Name {
+			optNmPath := append(optPath, internal.Uninterpreted_nameTag, int32(j))
+			nn := r.getOptionNamePartNode(n).(*optionNamePartNode)
+			sci.newLoc(nn, optNmPath)
+			sci.newLoc(nn.text, append(optNmPath, internal.UninterpretedName_nameTag))
+		}
+	}
+}
+
+// generateSourceCodeInfoForMessage records locations for a message and all of
+// its contents (fields, one-ofs, nested types, extensions, extension and
+// reserved ranges, reserved names), recursing into nested messages. path is
+// the message's own path prefix.
+func (r *parseResult) generateSourceCodeInfoForMessage(sci *sourceCodeInfo, msg *dpb.DescriptorProto, path []int32) {
+	n := r.getMessageNode(msg)
+	sci.newLoc(n, path)
+
+	// a message descriptor may come from either a message or group declaration
+	var decls []*messageElement
+	var resvdNames []*stringLiteralNode
+	switch n := n.(type) {
+	case *messageNode:
+		decls = n.decls
+		resvdNames = n.reserved
+	case *groupNode:
+		decls = n.decls
+		resvdNames = n.reserved
+	}
+	if decls == nil {
+		// map entry so nothing else to do
+		return
+	}
+
+	sci.newLoc(n.messageName(), append(path, internal.Message_nameTag))
+
+	// message options
+	r.generateSourceCodeInfoForOptions(sci, decls, func(n interface{}) *optionNode {
+		return n.(*messageElement).option
+	}, msg.Options.GetUninterpretedOption(), append(path, internal.Message_optionsTag))
+
+	// fields
+	for i, fld := range msg.GetField() {
+		r.generateSourceCodeInfoForField(sci, fld, append(path, internal.Message_fieldsTag, int32(i)))
+	}
+
+	// one-ofs
+	for i, ood := range msg.GetOneofDecl() {
+		oon := r.getOneOfNode(ood).(*oneOfNode)
+		ooPath := append(path, internal.Message_oneOfsTag, int32(i))
+		sci.newLoc(oon, ooPath)
+		sci.newLoc(oon.name, append(ooPath, internal.OneOf_nameTag))
+
+		// one-of options
+		r.generateSourceCodeInfoForOptions(sci, oon.decls, func(n interface{}) *optionNode {
+			return n.(*oneOfElement).option
+		}, ood.Options.GetUninterpretedOption(), append(ooPath, internal.OneOf_optionsTag))
+	}
+
+	// nested messages
+	for i, nm := range msg.GetNestedType() {
+		r.generateSourceCodeInfoForMessage(sci, nm, append(path, internal.Message_nestedMessagesTag, int32(i)))
+	}
+
+	// nested enums
+	for i, enum := range msg.GetEnumType() {
+		r.generateSourceCodeInfoForEnum(sci, enum, append(path, internal.Message_enumsTag, int32(i)))
+	}
+
+	// nested extensions
+	for i, ext := range msg.GetExtension() {
+		r.generateSourceCodeInfoForField(sci, ext, append(path, internal.Message_extensionsTag, int32(i)))
+	}
+
+	// extension ranges
+	for i, er := range msg.ExtensionRange {
+		rangePath := append(path, internal.Message_extensionRangeTag, int32(i))
+		rn := r.getExtensionRangeNode(er).(*rangeNode)
+		sci.newLoc(rn, rangePath)
+		sci.newLoc(rn.stNode, append(rangePath, internal.ExtensionRange_startTag))
+		if rn.stNode != rn.enNode {
+			sci.newLoc(rn.enNode, append(rangePath, internal.ExtensionRange_endTag))
+		}
+		// now we have to find the extension decl and options that correspond to this range :(
+		for _, d := range decls {
+			found := false
+			if d.extensionRange != nil {
+				// NOTE: the range variable r here shadows the method
+				// receiver r for the body of this inner loop
+				for _, r := range d.extensionRange.ranges {
+					if rn == r {
+						found = true
+						break
+					}
+				}
+			}
+			if found {
+				r.generateSourceCodeInfoForOptions(sci, d.extensionRange.options, func(n interface{}) *optionNode {
+					return n.(*optionNode)
+				}, er.Options.GetUninterpretedOption(), append(rangePath, internal.ExtensionRange_optionsTag))
+				break
+			}
+		}
+	}
+
+	// reserved ranges
+	for i, rr := range msg.ReservedRange {
+		rangePath := append(path, internal.Message_reservedRangeTag, int32(i))
+		rn := r.getMessageReservedRangeNode(rr).(*rangeNode)
+		sci.newLoc(rn, rangePath)
+		sci.newLoc(rn.stNode, append(rangePath, internal.ReservedRange_startTag))
+		if rn.stNode != rn.enNode {
+			sci.newLoc(rn.enNode, append(rangePath, internal.ReservedRange_endTag))
+		}
+	}
+
+	// reserved names
+	for i, n := range resvdNames {
+		sci.newLoc(n, append(path, internal.Message_reservedNameTag, int32(i)))
+	}
+}
+
+// generateSourceCodeInfoForEnum records locations for an enum, its values
+// (and their options), reserved ranges, and reserved names. path is the
+// enum's own path prefix.
+func (r *parseResult) generateSourceCodeInfoForEnum(sci *sourceCodeInfo, enum *dpb.EnumDescriptorProto, path []int32) {
+	n := r.getEnumNode(enum).(*enumNode)
+	sci.newLoc(n, path)
+	sci.newLoc(n.name, append(path, internal.Enum_nameTag))
+
+	// enum options
+	r.generateSourceCodeInfoForOptions(sci, n.decls, func(n interface{}) *optionNode {
+		return n.(*enumElement).option
+	}, enum.Options.GetUninterpretedOption(), append(path, internal.Enum_optionsTag))
+
+	// enum values
+	for j, ev := range enum.GetValue() {
+		evn := r.getEnumValueNode(ev).(*enumValueNode)
+		evPath := append(path, internal.Enum_valuesTag, int32(j))
+		sci.newLoc(evn, evPath)
+		sci.newLoc(evn.name, append(evPath, internal.EnumVal_nameTag))
+		sci.newLoc(evn.getNumber(), append(evPath, internal.EnumVal_numberTag))
+
+		// enum value options
+		r.generateSourceCodeInfoForOptions(sci, evn.options, func(n interface{}) *optionNode {
+			return n.(*optionNode)
+		}, ev.Options.GetUninterpretedOption(), append(evPath, internal.EnumVal_optionsTag))
+	}
+
+	// reserved ranges
+	for i, rr := range enum.GetReservedRange() {
+		rangePath := append(path, internal.Enum_reservedRangeTag, int32(i))
+		rn := r.getEnumReservedRangeNode(rr).(*rangeNode)
+		sci.newLoc(rn, rangePath)
+		sci.newLoc(rn.stNode, append(rangePath, internal.ReservedRange_startTag))
+		if rn.stNode != rn.enNode {
+			sci.newLoc(rn.enNode, append(rangePath, internal.ReservedRange_endTag))
+		}
+	}
+
+	// reserved names
+	for i, rn := range n.reserved {
+		sci.newLoc(rn, append(path, internal.Enum_reservedNameTag, int32(i)))
+	}
+}
+
+// generateSourceCodeInfoForField records locations for a single field or
+// extension: its name, type, label, number, extendee (for extensions), and
+// options. path is the field's own path prefix.
+func (r *parseResult) generateSourceCodeInfoForField(sci *sourceCodeInfo, fld *dpb.FieldDescriptorProto, path []int32) {
+	n := r.getFieldNode(fld)
+
+	// the field may come from a normal field, map field, or group
+	// declaration, which differ in what sub-locations they carry
+	isGroup := false
+	var opts []*optionNode
+	var extendee *extendNode
+	switch n := n.(type) {
+	case *fieldNode:
+		opts = n.options
+		extendee = n.extendee
+	case *mapFieldNode:
+		opts = n.options
+	case *groupNode:
+		isGroup = true
+		extendee = n.extendee
+	case *syntheticMapField:
+		// shouldn't get here since we don't recurse into fields from a mapNode
+		// in generateSourceCodeInfoForMessage... but just in case
+		return
+	}
+
+	sci.newLoc(n, path)
+	// groups get no separate name/type locations here
+	if !isGroup {
+		sci.newLoc(n.fieldName(), append(path, internal.Field_nameTag))
+		sci.newLoc(n.fieldType(), append(path, internal.Field_typeTag))
+	}
+	// label is optional (nil when the source had none)
+	if n.fieldLabel() != nil {
+		sci.newLoc(n.fieldLabel(), append(path, internal.Field_labelTag))
+	}
+	sci.newLoc(n.fieldTag(), append(path, internal.Field_numberTag))
+	if extendee != nil {
+		sci.newLoc(extendee.extendee, append(path, internal.Field_extendeeTag))
+	}
+
+	r.generateSourceCodeInfoForOptions(sci, opts, func(n interface{}) *optionNode {
+		return n.(*optionNode)
+	}, fld.Options.GetUninterpretedOption(), append(path, internal.Field_optionsTag))
+}
+
+// sourceCodeInfo accumulates leaf locations for a file and tracks which
+// comments have already been attributed to a location, so a comment is
+// never attached to more than one location.
+type sourceCodeInfo struct {
+	locs         []*dpb.SourceCodeInfo_Location
+	commentsUsed map[*comment]struct{}
+}
+
+// newLoc appends a location for node n at the given path. Comments already
+// claimed by another location are dropped. Leading comments are split into
+// detached groups; the last group is promoted to the "leading" comment only
+// when it ends on the line immediately preceding (or the same line as) the
+// node's start. Span values are zero-based (SourcePos lines/columns are
+// one-based); a single-line node gets a 3-element span, otherwise 4.
+func (sci *sourceCodeInfo) newLoc(n node, path []int32) {
+	leadingComments := n.leadingComments()
+	trailingComments := n.trailingComments()
+	if sci.commentUsed(leadingComments) {
+		leadingComments = nil
+	}
+	if sci.commentUsed(trailingComments) {
+		trailingComments = nil
+	}
+	detached := groupComments(leadingComments)
+	trail := combineComments(trailingComments)
+	var lead *string
+	if len(leadingComments) > 0 && leadingComments[len(leadingComments)-1].end.Line >= n.start().Line-1 {
+		lead = proto.String(detached[len(detached)-1])
+		detached = detached[:len(detached)-1]
+	}
+	// copy the path: callers build paths via append on a shared buffer
+	dup := make([]int32, len(path))
+	copy(dup, path)
+	var span []int32
+	if n.start().Line == n.end().Line {
+		span = []int32{int32(n.start().Line) - 1, int32(n.start().Col) - 1, int32(n.end().Col) - 1}
+	} else {
+		span = []int32{int32(n.start().Line) - 1, int32(n.start().Col) - 1, int32(n.end().Line) - 1, int32(n.end().Col) - 1}
+	}
+	sci.locs = append(sci.locs, &dpb.SourceCodeInfo_Location{
+		LeadingDetachedComments: detached,
+		LeadingComments:         lead,
+		TrailingComments:        trail,
+		Path:                    dup,
+		Span:                    span,
+	})
+}
+
+// commentUsed reports whether the comment group c was already attributed to
+// some earlier location. Note the side effect: when the group is new, it is
+// marked used (keyed by its first comment) before returning false, so a
+// subsequent call with the same group returns true.
+func (sci *sourceCodeInfo) commentUsed(c []*comment) bool {
+	if len(c) == 0 {
+		return false
+	}
+	if _, ok := sci.commentsUsed[c[0]]; ok {
+		return true
+	}
+
+	sci.commentsUsed[c[0]] = struct{}{}
+	return false
+}
+
+// groupComments partitions a run of comments into groups of adjacent
+// comments and returns each group combined into a single string. A new
+// group starts at any block (/* */) comment, at a switch between // and
+// /* */ styles, or when more than one blank line separates comments.
+func groupComments(comments []*comment) []string {
+	if len(comments) == 0 {
+		return nil
+	}
+
+	var groups []string
+	singleLineStyle := comments[0].text[:2] == "//"
+	line := comments[0].end.Line
+	start := 0
+	for i := 1; i < len(comments); i++ {
+		c := comments[i]
+		prevSingleLine := singleLineStyle
+		singleLineStyle = strings.HasPrefix(comments[i].text, "//")
+		if !singleLineStyle || prevSingleLine != singleLineStyle || c.start.Line > line+1 {
+			// new group!
+			groups = append(groups, *combineComments(comments[start:i]))
+			start = i
+		}
+		line = c.end.Line
+	}
+	// don't forget last group
+	groups = append(groups, *combineComments(comments[start:]))
+
+	return groups
+}
+
+// combineComments joins the text of the given comments into a single
+// newline-separated string, stripping the // prefix from line comments and
+// the /* */ delimiters from block comments. Within a block comment, a
+// leading run of whitespace followed by '*' is removed from each line (the
+// common "boxed" comment style). Returns nil for an empty slice.
+func combineComments(comments []*comment) *string {
+	if len(comments) == 0 {
+		return nil
+	}
+	first := true
+	var buf bytes.Buffer
+	for _, c := range comments {
+		if first {
+			first = false
+		} else {
+			buf.WriteByte('\n')
+		}
+		if c.text[:2] == "//" {
+			buf.WriteString(c.text[2:])
+		} else {
+			// block comment: drop the delimiters, then clean each line
+			lines := strings.Split(c.text[2:len(c.text)-2], "\n")
+			first := true
+			for _, l := range lines {
+				if first {
+					first = false
+				} else {
+					buf.WriteByte('\n')
+				}
+
+				// strip a prefix of whitespace followed by '*'
+				j := 0
+				for j < len(l) {
+					if l[j] != ' ' && l[j] != '\t' {
+						break
+					}
+					j++
+				}
+				if j == len(l) {
+					l = ""
+				} else if l[j] == '*' {
+					l = l[j+1:]
+				} else if j > 0 {
+					l = " " + l[j:]
+				}
+
+				buf.WriteString(l)
+			}
+		}
+	}
+	return proto.String(buf.String())
+}
+
+// generateLocs returns the final, sorted slice of locations: the leaf
+// locations recorded via newLoc plus synthesized intermediate locations for
+// every ancestor path.
+func (sci *sourceCodeInfo) generateLocs() []*dpb.SourceCodeInfo_Location {
+	// generate intermediate locations: paths between root (inclusive) and the
+	// leaf locations already created; these will not have comments but will
+	// have an aggregate span that runs from min(start pos) to max(end pos)
+	// for all descendant paths.
+
+	if len(sci.locs) == 0 {
+		// nothing to generate
+		return nil
+	}
+
+	// build a trie keyed by path so ancestors can be filled in
+	var root locTrie
+	for _, loc := range sci.locs {
+		root.add(loc.Path, loc)
+	}
+	root.fillIn()
+	locs := make([]*dpb.SourceCodeInfo_Location, 0, root.countLocs())
+	root.aggregate(&locs)
+	// finally, sort the resulting slice by location
+	sort.Slice(locs, func(i, j int) bool {
+		startI, endI := getSpanPositions(locs[i].Span)
+		startJ, endJ := getSpanPositions(locs[j].Span)
+		cmp := compareSlice(startI, startJ)
+		if cmp == 0 {
+			// if start position is the same, sort by end position _decreasing_
+			// (so enclosing locations will appear before leaves)
+			cmp = -compareSlice(endI, endJ)
+			if cmp == 0 {
+				// start and end position are the same? so break ties using path
+				cmp = compareSlice(locs[i].Path, locs[j].Path)
+			}
+		}
+		return cmp < 0
+	})
+	return locs
+}
+
+// locTrie is a trie over location paths; each node holds the location (if
+// any) whose Path ends at that node, keyed edge-by-edge on path elements.
+type locTrie struct {
+	children map[int32]*locTrie
+	loc      *dpb.SourceCodeInfo_Location
+}
+
+// add inserts loc at the trie node addressed by path, creating intermediate
+// child nodes (with nil loc) as needed.
+func (t *locTrie) add(path []int32, loc *dpb.SourceCodeInfo_Location) {
+	if len(path) == 0 {
+		t.loc = loc
+		return
+	}
+	child := t.children[path[0]]
+	if child == nil {
+		// lazily allocate the child map on first insertion
+		if t.children == nil {
+			t.children = map[int32]*locTrie{}
+		}
+		child = &locTrie{}
+		t.children[path[0]] = child
+	}
+	child.add(path[1:], loc)
+}
+
+// fillIn synthesizes a location for every trie node that has none, after
+// recursively filling in its children: the synthesized span runs from the
+// minimum child start to the maximum child end, and the path is derived by
+// truncating a child's path. Map iteration order does not matter because
+// only the min/max over all children is kept.
+// NOTE(review): a loc-less node with zero children would make start nil and
+// panic below — presumably impossible by construction via add, since
+// interior nodes are only created on the way to a leaf; confirm if reusing.
+func (t *locTrie) fillIn() {
+	var path []int32
+	var start, end []int32
+	for _, child := range t.children {
+		// recurse
+		child.fillIn()
+		if t.loc == nil {
+			// maintain min(start) and max(end) so we can
+			// populate t.loc below
+			childStart, childEnd := getSpanPositions(child.loc.Span)
+
+			if start == nil {
+				if path == nil {
+					path = child.loc.Path[:len(child.loc.Path)-1]
+				}
+				start = childStart
+				end = childEnd
+			} else {
+				if compareSlice(childStart, start) < 0 {
+					start = childStart
+				}
+				if compareSlice(childEnd, end) > 0 {
+					end = childEnd
+				}
+			}
+		}
+	}
+
+	if t.loc == nil {
+		var span []int32
+		// we don't use append below because we want a new slice
+		// that doesn't share underlying buffer with spans from
+		// any other location
+		if start[0] == end[0] {
+			span = []int32{start[0], start[1], end[1]}
+		} else {
+			span = []int32{start[0], start[1], end[0], end[1]}
+		}
+		t.loc = &dpb.SourceCodeInfo_Location{
+			Path: path,
+			Span: span,
+		}
+	}
+}
+
+// countLocs returns the number of locations in this subtree (used to
+// pre-size the output slice in generateLocs).
+func (t *locTrie) countLocs() int {
+	count := 0
+	if t.loc != nil {
+		count = 1
+	}
+	for _, ch := range t.children {
+		count += ch.countLocs()
+	}
+	return count
+}
+
+// aggregate appends every location in this subtree to dest, parent before
+// children; sibling order is unspecified (map iteration) and is fixed up by
+// the sort in generateLocs.
+func (t *locTrie) aggregate(dest *[]*dpb.SourceCodeInfo_Location) {
+	if t.loc != nil {
+		*dest = append(*dest, t.loc)
+	}
+	for _, child := range t.children {
+		child.aggregate(dest)
+	}
+}
+
+// getSpanPositions splits a SourceCodeInfo span into (line, col) start and
+// end pairs. A 3-element span means start and end are on the same line, so
+// the end reuses the start line.
+func getSpanPositions(span []int32) (start, end []int32) {
+	start = span[:2]
+	if len(span) == 3 {
+		end = []int32{span[0], span[2]}
+	} else {
+		end = span[2:]
+	}
+	return
+}
+
+// compareSlice lexicographically compares two int32 slices, returning -1, 0,
+// or 1. When one slice is a prefix of the other, the shorter sorts first.
+func compareSlice(a, b []int32) int {
+	end := len(a)
+	if len(b) < end {
+		end = len(b)
+	}
+	for i := 0; i < end; i++ {
+		if a[i] < b[i] {
+			return -1
+		}
+		if a[i] > b[i] {
+			return 1
+		}
+	}
+	if len(a) < len(b) {
+		return -1
+	}
+	if len(a) > len(b) {
+		return 1
+	}
+	return 0
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/std_imports.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/std_imports.go
new file mode 100644
index 0000000..59bcdd3
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/std_imports.go
@@ -0,0 +1,49 @@
+package protoparse
+
+import (
+	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
+	// link in packages that include the standard protos included with protoc
+	_ "github.com/golang/protobuf/protoc-gen-go/plugin"
+	_ "github.com/golang/protobuf/ptypes/any"
+	_ "github.com/golang/protobuf/ptypes/duration"
+	_ "github.com/golang/protobuf/ptypes/empty"
+	_ "github.com/golang/protobuf/ptypes/struct"
+	_ "github.com/golang/protobuf/ptypes/timestamp"
+	_ "github.com/golang/protobuf/ptypes/wrappers"
+	_ "google.golang.org/genproto/protobuf/api"
+	_ "google.golang.org/genproto/protobuf/field_mask"
+	_ "google.golang.org/genproto/protobuf/ptype"
+	_ "google.golang.org/genproto/protobuf/source_context"
+
+	"github.com/jhump/protoreflect/internal"
+)
+
+// All files that are included with protoc are also included with this package
+// so that clients do not need to explicitly supply a copy of these protos (just
+// like callers of protoc do not need to supply them). Populated by init below,
+// keyed by the proto file's import path (e.g. "google/protobuf/any.proto").
+var standardImports map[string]*dpb.FileDescriptorProto
+
+// init loads the descriptors for the standard protoc-supplied proto files
+// from the generated Go packages linked in via the blank imports above.
+func init() {
+	standardFilenames := []string{
+		"google/protobuf/any.proto",
+		"google/protobuf/api.proto",
+		"google/protobuf/compiler/plugin.proto",
+		"google/protobuf/descriptor.proto",
+		"google/protobuf/duration.proto",
+		"google/protobuf/empty.proto",
+		"google/protobuf/field_mask.proto",
+		"google/protobuf/source_context.proto",
+		"google/protobuf/struct.proto",
+		"google/protobuf/timestamp.proto",
+		"google/protobuf/type.proto",
+		"google/protobuf/wrappers.proto",
+	}
+
+	standardImports = map[string]*dpb.FileDescriptorProto{}
+	for _, fn := range standardFilenames {
+		fd, err := internal.LoadFileDescriptor(fn)
+		// best effort: a file that fails to load is simply omitted from
+		// the map (the error is deliberately discarded)
+		if err == nil {
+			standardImports[fn] = fd
+		}
+	}
+}
diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/test-source-info.txt b/vendor/github.com/jhump/protoreflect/desc/protoparse/test-source-info.txt
new file mode 100644
index 0000000..c03fd64
--- /dev/null
+++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/test-source-info.txt
@@ -0,0 +1,1696 @@
+---- desc_test_comments.proto ----
+
+
+:
+desc_test_comments.proto:8:1
+desc_test_comments.proto:119:2
+
+
+ > syntax:
+desc_test_comments.proto:8:1
+desc_test_comments.proto:8:19
+    Leading detached comment [0]:
+ This is the first detached comment for the syntax.
+    Leading detached comment [1]:
+
+ This is a second detached comment.
+
+    Leading detached comment [2]:
+ This is a third.
+    Leading comments:
+ Syntax comment...
+    Trailing comments:
+ Syntax trailer.
+
+
+ > package:
+desc_test_comments.proto:12:1
+desc_test_comments.proto:12:17
+    Leading comments:
+ And now the package declaration
+
+
+ > options:
+desc_test_comments.proto:15:1
+desc_test_comments.proto:15:75
+
+
+ > options > go_package:
+desc_test_comments.proto:15:1
+desc_test_comments.proto:15:75
+    Leading comments:
+ option comments FTW!!!
+
+
+ > dependency:
+desc_test_comments.proto:17:1
+desc_test_comments.proto:18:34
+
+
+ > dependency[0]:
+desc_test_comments.proto:17:1
+desc_test_comments.proto:17:38
+
+
+ > dependency[1]:
+desc_test_comments.proto:18:1
+desc_test_comments.proto:18:34
+
+
+ > message_type:
+desc_test_comments.proto:25:1
+desc_test_comments.proto:89:2
+
+
+ > message_type[0]:
+desc_test_comments.proto:25:1
+desc_test_comments.proto:89:2
+    Leading detached comment [0]:
+ Multiple white space lines (like above) cannot
+ be preserved...
+    Leading comments:
+ We need a request for our RPC service below.
+    Trailing comments:
+ And next we'll need some extensions...
+
+
+ > message_type[0] > name:
+desc_test_comments.proto:25:68
+desc_test_comments.proto:25:75
+    Leading detached comment [0]:
+ detached message name 
+    Leading comments:
+ request with a capital R 
+    Trailing comments:
+ trailer
+
+
+ > message_type[0] > options:
+desc_test_comments.proto:26:3
+desc_test_comments.proto:35:54
+
+
+ > message_type[0] > options > deprecated:
+desc_test_comments.proto:26:3
+desc_test_comments.proto:26:28
+
+
+ > message_type[0] > field:
+desc_test_comments.proto:29:2
+desc_test_comments.proto:66:3
+
+
+ > message_type[0] > field[0]:
+desc_test_comments.proto:29:2
+desc_test_comments.proto:32:92
+    Leading comments:
+ A field comment
+    Trailing comments:
+ field trailer #1...
+
+
+ > message_type[0] > field[0] > label:
+desc_test_comments.proto:29:2
+desc_test_comments.proto:29:10
+
+
+ > message_type[0] > field[0] > type:
+desc_test_comments.proto:29:11
+desc_test_comments.proto:29:16
+
+
+ > message_type[0] > field[0] > name:
+desc_test_comments.proto:29:17
+desc_test_comments.proto:29:20
+
+
+ > message_type[0] > field[0] > number:
+desc_test_comments.proto:29:63
+desc_test_comments.proto:29:64
+    Leading detached comment [0]:
+ detached tag 
+    Leading comments:
+ tag numero uno 
+    Trailing comments:
+ tag trailer
+ that spans multiple lines...
+ more than two. 
+
+
+ > message_type[0] > field[0] > options:
+desc_test_comments.proto:32:5
+desc_test_comments.proto:32:90
+
+
+ > message_type[0] > field[0] > options > packed:
+desc_test_comments.proto:32:5
+desc_test_comments.proto:32:16
+
+
+ > message_type[0] > field[0] > json_name:
+desc_test_comments.proto:32:18
+desc_test_comments.proto:32:35
+
+
+ > message_type[0] > field[0] > options > ffubar:
+desc_test_comments.proto:32:37
+desc_test_comments.proto:32:62
+
+
+ > message_type[0] > field[0] > options > ffubar[0]:
+desc_test_comments.proto:32:37
+desc_test_comments.proto:32:62
+
+
+ > message_type[0] > field[0] > options > ffubarb:
+desc_test_comments.proto:32:64
+desc_test_comments.proto:32:90
+
+
+ > message_type[0] > options > mfubar:
+desc_test_comments.proto:35:20
+desc_test_comments.proto:35:54
+    Leading comments:
+ lead mfubar 
+    Trailing comments:
+ trailing mfubar
+
+
+ > message_type[0] > field[1]:
+desc_test_comments.proto:42:22
+desc_test_comments.proto:43:63
+    Leading detached comment [0]:
+ some detached comments
+    Leading detached comment [1]:
+ some detached comments
+    Leading detached comment [2]:
+ Another field comment
+    Leading comments:
+ label comment 
+
+
+ > message_type[0] > field[1] > label:
+desc_test_comments.proto:42:22
+desc_test_comments.proto:42:30
+
+
+ > message_type[0] > field[1] > type:
+desc_test_comments.proto:42:50
+desc_test_comments.proto:42:56
+    Leading comments:
+ type comment 
+
+
+ > message_type[0] > field[1] > name:
+desc_test_comments.proto:42:76
+desc_test_comments.proto:42:80
+    Leading comments:
+ name comment 
+
+
+ > message_type[0] > field[1] > number:
+desc_test_comments.proto:42:83
+desc_test_comments.proto:42:84
+
+
+ > message_type[0] > field[1] > default_value:
+desc_test_comments.proto:43:23
+desc_test_comments.proto:43:40
+    Leading comments:
+ default lead 
+    Trailing comments:
+ default trail 
+
+
+ > message_type[0] > extension_range:
+desc_test_comments.proto:46:13
+desc_test_comments.proto:47:23
+
+
+ > message_type[0] > extension_range[0]:
+desc_test_comments.proto:46:13
+desc_test_comments.proto:46:23
+
+
+ > message_type[0] > extension_range[0] > start:
+desc_test_comments.proto:46:13
+desc_test_comments.proto:46:16
+
+
+ > message_type[0] > extension_range[0] > end:
+desc_test_comments.proto:46:20
+desc_test_comments.proto:46:23
+
+
+ > message_type[0] > extension_range[1]:
+desc_test_comments.proto:47:13
+desc_test_comments.proto:47:23
+
+
+ > message_type[0] > extension_range[1] > start:
+desc_test_comments.proto:47:13
+desc_test_comments.proto:47:16
+
+
+ > message_type[0] > extension_range[1] > end:
+desc_test_comments.proto:47:20
+desc_test_comments.proto:47:23
+
+
+ > message_type[0] > extension_range[1] > options:
+desc_test_comments.proto:47:25
+desc_test_comments.proto:47:100
+
+
+ > message_type[0] > extension_range[1] > options > exfubarb:
+desc_test_comments.proto:47:25
+desc_test_comments.proto:47:67
+
+
+ > message_type[0] > extension_range[1] > options > exfubar:
+desc_test_comments.proto:47:69
+desc_test_comments.proto:47:100
+
+
+ > message_type[0] > extension_range[1] > options > exfubar[0]:
+desc_test_comments.proto:47:69
+desc_test_comments.proto:47:100
+
+
+ > message_type[0] > reserved_range:
+desc_test_comments.proto:51:50
+desc_test_comments.proto:51:68
+
+
+ > message_type[0] > reserved_range[0]:
+desc_test_comments.proto:51:50
+desc_test_comments.proto:51:58
+
+
+ > message_type[0] > reserved_range[0] > start:
+desc_test_comments.proto:51:50
+desc_test_comments.proto:51:52
+
+
+ > message_type[0] > reserved_range[0] > end:
+desc_test_comments.proto:51:56
+desc_test_comments.proto:51:58
+
+
+ > message_type[0] > reserved_range[1]:
+desc_test_comments.proto:51:60
+desc_test_comments.proto:51:68
+
+
+ > message_type[0] > reserved_range[1] > start:
+desc_test_comments.proto:51:60
+desc_test_comments.proto:51:62
+
+
+ > message_type[0] > reserved_range[1] > end:
+desc_test_comments.proto:51:66
+desc_test_comments.proto:51:68
+
+
+ > message_type[0] > reserved_name:
+desc_test_comments.proto:52:11
+desc_test_comments.proto:52:30
+
+
+ > message_type[0] > reserved_name[0]:
+desc_test_comments.proto:52:11
+desc_test_comments.proto:52:16
+
+
+ > message_type[0] > reserved_name[1]:
+desc_test_comments.proto:52:18
+desc_test_comments.proto:52:23
+
+
+ > message_type[0] > reserved_name[2]:
+desc_test_comments.proto:52:25
+desc_test_comments.proto:52:30
+
+
+ > message_type[0] > field[2]:
+desc_test_comments.proto:55:2
+desc_test_comments.proto:66:3
+    Leading comments:
+ Group comment
+
+
+ > message_type[0] > nested_type:
+desc_test_comments.proto:55:2
+desc_test_comments.proto:66:3
+
+
+ > message_type[0] > nested_type[0]:
+desc_test_comments.proto:55:2
+desc_test_comments.proto:66:3
+
+
+ > message_type[0] > field[2] > label:
+desc_test_comments.proto:55:2
+desc_test_comments.proto:55:10
+
+
+ > message_type[0] > nested_type[0] > name:
+desc_test_comments.proto:55:34
+desc_test_comments.proto:55:40
+    Leading comments:
+ group name 
+
+
+ > message_type[0] > field[2] > number:
+desc_test_comments.proto:55:43
+desc_test_comments.proto:55:44
+
+
+ > message_type[0] > nested_type[0] > options:
+desc_test_comments.proto:56:3
+desc_test_comments.proto:61:50
+
+
+ > message_type[0] > nested_type[0] > options > mfubar:
+desc_test_comments.proto:56:3
+desc_test_comments.proto:56:38
+
+
+ > message_type[0] > nested_type[0] > field:
+desc_test_comments.proto:58:3
+desc_test_comments.proto:64:27
+
+
+ > message_type[0] > nested_type[0] > field[0]:
+desc_test_comments.proto:58:3
+desc_test_comments.proto:58:27
+
+
+ > message_type[0] > nested_type[0] > field[0] > label:
+desc_test_comments.proto:58:3
+desc_test_comments.proto:58:11
+
+
+ > message_type[0] > nested_type[0] > field[0] > type:
+desc_test_comments.proto:58:12
+desc_test_comments.proto:58:18
+
+
+ > message_type[0] > nested_type[0] > field[0] > name:
+desc_test_comments.proto:58:19
+desc_test_comments.proto:58:22
+
+
+ > message_type[0] > nested_type[0] > field[0] > number:
+desc_test_comments.proto:58:25
+desc_test_comments.proto:58:26
+
+
+ > message_type[0] > nested_type[0] > field[1]:
+desc_test_comments.proto:59:3
+desc_test_comments.proto:59:26
+
+
+ > message_type[0] > nested_type[0] > field[1] > label:
+desc_test_comments.proto:59:3
+desc_test_comments.proto:59:11
+
+
+ > message_type[0] > nested_type[0] > field[1] > type:
+desc_test_comments.proto:59:12
+desc_test_comments.proto:59:17
+
+
+ > message_type[0] > nested_type[0] > field[1] > name:
+desc_test_comments.proto:59:18
+desc_test_comments.proto:59:21
+
+
+ > message_type[0] > nested_type[0] > field[1] > number:
+desc_test_comments.proto:59:24
+desc_test_comments.proto:59:25
+
+
+ > message_type[0] > nested_type[0] > options > no_standard_descriptor_accessor:
+desc_test_comments.proto:61:3
+desc_test_comments.proto:61:50
+
+
+ > message_type[0] > nested_type[0] > field[2]:
+desc_test_comments.proto:64:3
+desc_test_comments.proto:64:27
+    Leading comments:
+ Leading comment...
+    Trailing comments:
+ Trailing comment...
+
+
+ > message_type[0] > nested_type[0] > field[2] > label:
+desc_test_comments.proto:64:3
+desc_test_comments.proto:64:11
+
+
+ > message_type[0] > nested_type[0] > field[2] > type:
+desc_test_comments.proto:64:12
+desc_test_comments.proto:64:18
+
+
+ > message_type[0] > nested_type[0] > field[2] > name:
+desc_test_comments.proto:64:19
+desc_test_comments.proto:64:22
+
+
+ > message_type[0] > nested_type[0] > field[2] > number:
+desc_test_comments.proto:64:25
+desc_test_comments.proto:64:26
+
+
+ > message_type[0] > enum_type:
+desc_test_comments.proto:68:2
+desc_test_comments.proto:88:3
+
+
+ > message_type[0] > enum_type[0]:
+desc_test_comments.proto:68:2
+desc_test_comments.proto:88:3
+
+
+ > message_type[0] > enum_type[0] > name:
+desc_test_comments.proto:68:7
+desc_test_comments.proto:68:22
+    Trailing comments:
+ "super"!
+
+
+ > message_type[0] > enum_type[0] > value:
+desc_test_comments.proto:72:3
+desc_test_comments.proto:85:17
+
+
+ > message_type[0] > enum_type[0] > value[0]:
+desc_test_comments.proto:72:3
+desc_test_comments.proto:72:72
+
+
+ > message_type[0] > enum_type[0] > value[0] > name:
+desc_test_comments.proto:72:3
+desc_test_comments.proto:72:8
+
+
+ > message_type[0] > enum_type[0] > value[0] > number:
+desc_test_comments.proto:72:11
+desc_test_comments.proto:72:12
+
+
+ > message_type[0] > enum_type[0] > value[0] > options:
+desc_test_comments.proto:72:14
+desc_test_comments.proto:72:70
+
+
+ > message_type[0] > enum_type[0] > value[0] > options > evfubars:
+desc_test_comments.proto:72:14
+desc_test_comments.proto:72:42
+
+
+ > message_type[0] > enum_type[0] > value[0] > options > evfubar:
+desc_test_comments.proto:72:44
+desc_test_comments.proto:72:70
+
+
+ > message_type[0] > enum_type[0] > value[1]:
+desc_test_comments.proto:73:3
+desc_test_comments.proto:73:86
+
+
+ > message_type[0] > enum_type[0] > value[1] > name:
+desc_test_comments.proto:73:3
+desc_test_comments.proto:73:8
+
+
+ > message_type[0] > enum_type[0] > value[1] > number:
+desc_test_comments.proto:73:11
+desc_test_comments.proto:73:12
+
+
+ > message_type[0] > enum_type[0] > value[1] > options:
+desc_test_comments.proto:73:15
+desc_test_comments.proto:73:84
+
+
+ > message_type[0] > enum_type[0] > value[1] > options > evfubaruf:
+desc_test_comments.proto:73:15
+desc_test_comments.proto:73:43
+
+
+ > message_type[0] > enum_type[0] > value[1] > options > evfubaru:
+desc_test_comments.proto:73:59
+desc_test_comments.proto:73:84
+
+
+ > message_type[0] > enum_type[0] > value[2]:
+desc_test_comments.proto:74:3
+desc_test_comments.proto:74:13
+
+
+ > message_type[0] > enum_type[0] > value[2] > name:
+desc_test_comments.proto:74:3
+desc_test_comments.proto:74:8
+
+
+ > message_type[0] > enum_type[0] > value[2] > number:
+desc_test_comments.proto:74:11
+desc_test_comments.proto:74:12
+
+
+ > message_type[0] > enum_type[0] > value[3]:
+desc_test_comments.proto:75:3
+desc_test_comments.proto:75:14
+
+
+ > message_type[0] > enum_type[0] > value[3] > name:
+desc_test_comments.proto:75:3
+desc_test_comments.proto:75:9
+
+
+ > message_type[0] > enum_type[0] > value[3] > number:
+desc_test_comments.proto:75:12
+desc_test_comments.proto:75:13
+
+
+ > message_type[0] > enum_type[0] > options:
+desc_test_comments.proto:77:3
+desc_test_comments.proto:87:36
+
+
+ > message_type[0] > enum_type[0] > options > efubars:
+desc_test_comments.proto:77:3
+desc_test_comments.proto:77:38
+
+
+ > message_type[0] > enum_type[0] > value[4]:
+desc_test_comments.proto:79:3
+desc_test_comments.proto:79:13
+
+
+ > message_type[0] > enum_type[0] > value[4] > name:
+desc_test_comments.proto:79:3
+desc_test_comments.proto:79:8
+
+
+ > message_type[0] > enum_type[0] > value[4] > number:
+desc_test_comments.proto:79:11
+desc_test_comments.proto:79:12
+
+
+ > message_type[0] > enum_type[0] > value[5]:
+desc_test_comments.proto:80:3
+desc_test_comments.proto:80:15
+
+
+ > message_type[0] > enum_type[0] > value[5] > name:
+desc_test_comments.proto:80:3
+desc_test_comments.proto:80:10
+
+
+ > message_type[0] > enum_type[0] > value[5] > number:
+desc_test_comments.proto:80:13
+desc_test_comments.proto:80:14
+
+
+ > message_type[0] > enum_type[0] > value[6]:
+desc_test_comments.proto:81:3
+desc_test_comments.proto:81:46
+
+
+ > message_type[0] > enum_type[0] > value[6] > name:
+desc_test_comments.proto:81:3
+desc_test_comments.proto:81:10
+
+
+ > message_type[0] > enum_type[0] > value[6] > number:
+desc_test_comments.proto:81:13
+desc_test_comments.proto:81:14
+
+
+ > message_type[0] > enum_type[0] > value[6] > options:
+desc_test_comments.proto:81:16
+desc_test_comments.proto:81:44
+
+
+ > message_type[0] > enum_type[0] > value[6] > options > evfubarsf:
+desc_test_comments.proto:81:16
+desc_test_comments.proto:81:44
+
+
+ > message_type[0] > enum_type[0] > value[7]:
+desc_test_comments.proto:82:3
+desc_test_comments.proto:82:14
+
+
+ > message_type[0] > enum_type[0] > value[7] > name:
+desc_test_comments.proto:82:3
+desc_test_comments.proto:82:9
+
+
+ > message_type[0] > enum_type[0] > value[7] > number:
+desc_test_comments.proto:82:12
+desc_test_comments.proto:82:13
+
+
+ > message_type[0] > enum_type[0] > value[8]:
+desc_test_comments.proto:83:3
+desc_test_comments.proto:83:17
+
+
+ > message_type[0] > enum_type[0] > value[8] > name:
+desc_test_comments.proto:83:3
+desc_test_comments.proto:83:12
+
+
+ > message_type[0] > enum_type[0] > value[8] > number:
+desc_test_comments.proto:83:15
+desc_test_comments.proto:83:16
+
+
+ > message_type[0] > enum_type[0] > value[9]:
+desc_test_comments.proto:84:3
+desc_test_comments.proto:84:13
+
+
+ > message_type[0] > enum_type[0] > value[9] > name:
+desc_test_comments.proto:84:3
+desc_test_comments.proto:84:8
+
+
+ > message_type[0] > enum_type[0] > value[9] > number:
+desc_test_comments.proto:84:11
+desc_test_comments.proto:84:12
+
+
+ > message_type[0] > enum_type[0] > value[10]:
+desc_test_comments.proto:85:3
+desc_test_comments.proto:85:17
+
+
+ > message_type[0] > enum_type[0] > value[10] > name:
+desc_test_comments.proto:85:3
+desc_test_comments.proto:85:9
+
+
+ > message_type[0] > enum_type[0] > value[10] > number:
+desc_test_comments.proto:85:12
+desc_test_comments.proto:85:16
+
+
+ > message_type[0] > enum_type[0] > options > efubar:
+desc_test_comments.proto:87:3
+desc_test_comments.proto:87:36
+
+
+ > extension[0] > extendee:
+desc_test_comments.proto:94:1
+desc_test_comments.proto:94:8
+    Leading comments:
+ extendee comment
+
+
+ > extension[1] > extendee:
+desc_test_comments.proto:94:1
+desc_test_comments.proto:94:8
+
+
+ > extension:
+desc_test_comments.proto:96:2
+desc_test_comments.proto:98:30
+
+
+ > extension[0]:
+desc_test_comments.proto:96:2
+desc_test_comments.proto:96:30
+    Leading comments:
+ comment for guid1
+
+
+ > extension[0] > label:
+desc_test_comments.proto:96:2
+desc_test_comments.proto:96:10
+
+
+ > extension[0] > type:
+desc_test_comments.proto:96:11
+desc_test_comments.proto:96:17
+
+
+ > extension[0] > name:
+desc_test_comments.proto:96:18
+desc_test_comments.proto:96:23
+
+
+ > extension[0] > number:
+desc_test_comments.proto:96:26
+desc_test_comments.proto:96:29
+
+
+ > extension[1]:
+desc_test_comments.proto:98:2
+desc_test_comments.proto:98:30
+    Leading comments:
+ ... and a comment for guid2
+
+
+ > extension[1] > label:
+desc_test_comments.proto:98:2
+desc_test_comments.proto:98:10
+
+
+ > extension[1] > type:
+desc_test_comments.proto:98:11
+desc_test_comments.proto:98:17
+
+
+ > extension[1] > name:
+desc_test_comments.proto:98:18
+desc_test_comments.proto:98:23
+
+
+ > extension[1] > number:
+desc_test_comments.proto:98:26
+desc_test_comments.proto:98:29
+
+
+ > service:
+desc_test_comments.proto:103:1
+desc_test_comments.proto:119:2
+
+
+ > service[0]:
+desc_test_comments.proto:103:1
+desc_test_comments.proto:119:2
+    Leading comments:
+ Service comment
+    Trailing comments:
+ service trailer
+
+
+ > service[0] > name:
+desc_test_comments.proto:103:28
+desc_test_comments.proto:103:38
+    Leading comments:
+ service name 
+
+
+ > service[0] > options:
+desc_test_comments.proto:104:2
+desc_test_comments.proto:108:38
+
+
+ > service[0] > options > sfubar:
+desc_test_comments.proto:104:2
+desc_test_comments.proto:105:40
+
+
+ > service[0] > options > sfubar > id:
+desc_test_comments.proto:104:2
+desc_test_comments.proto:104:36
+
+
+ > service[0] > options > sfubar > name:
+desc_test_comments.proto:105:2
+desc_test_comments.proto:105:40
+
+
+ > service[0] > options > deprecated:
+desc_test_comments.proto:106:2
+desc_test_comments.proto:106:28
+
+
+ > service[0] > options > sfubare:
+desc_test_comments.proto:108:2
+desc_test_comments.proto:108:38
+
+
+ > service[0] > method:
+desc_test_comments.proto:111:2
+desc_test_comments.proto:118:3
+
+
+ > service[0] > method[0]:
+desc_test_comments.proto:111:2
+desc_test_comments.proto:112:70
+    Leading comments:
+ Method comment
+
+
+ > service[0] > method[0] > name:
+desc_test_comments.proto:111:21
+desc_test_comments.proto:111:33
+    Leading comments:
+ rpc name 
+    Trailing comments:
+ comment A 
+
+
+ > service[0] > method[0] > client_streaming:
+desc_test_comments.proto:111:66
+desc_test_comments.proto:111:72
+    Leading comments:
+ comment B 
+
+
+ > service[0] > method[0] > input_type:
+desc_test_comments.proto:111:89
+desc_test_comments.proto:111:96
+    Leading comments:
+ comment C 
+
+
+ > service[0] > method[0] > output_type:
+desc_test_comments.proto:112:43
+desc_test_comments.proto:112:50
+    Leading comments:
+comment E 
+
+
+ > service[0] > method[1]:
+desc_test_comments.proto:114:2
+desc_test_comments.proto:118:3
+
+
+ > service[0] > method[1] > name:
+desc_test_comments.proto:114:6
+desc_test_comments.proto:114:14
+
+
+ > service[0] > method[1] > input_type:
+desc_test_comments.proto:114:16
+desc_test_comments.proto:114:23
+
+
+ > service[0] > method[1] > output_type:
+desc_test_comments.proto:114:34
+desc_test_comments.proto:114:55
+
+
+ > service[0] > method[1] > options:
+desc_test_comments.proto:115:3
+desc_test_comments.proto:117:42
+
+
+ > service[0] > method[1] > options > deprecated:
+desc_test_comments.proto:115:3
+desc_test_comments.proto:115:28
+
+
+ > service[0] > method[1] > options > mtfubar:
+desc_test_comments.proto:116:3
+desc_test_comments.proto:116:39
+
+
+ > service[0] > method[1] > options > mtfubar[0]:
+desc_test_comments.proto:116:3
+desc_test_comments.proto:116:39
+
+
+ > service[0] > method[1] > options > mtfubard:
+desc_test_comments.proto:117:3
+desc_test_comments.proto:117:42
+---- desc_test_options.proto ----
+
+
+:
+desc_test_options.proto:1:1
+desc_test_options.proto:62:34
+
+
+ > syntax:
+desc_test_options.proto:1:1
+desc_test_options.proto:1:19
+
+
+ > options:
+desc_test_options.proto:3:1
+desc_test_options.proto:3:73
+
+
+ > options > go_package:
+desc_test_options.proto:3:1
+desc_test_options.proto:3:73
+
+
+ > package:
+desc_test_options.proto:5:1
+desc_test_options.proto:5:20
+
+
+ > dependency:
+desc_test_options.proto:7:1
+desc_test_options.proto:7:43
+
+
+ > dependency[0]:
+desc_test_options.proto:7:1
+desc_test_options.proto:7:43
+
+
+ > extension[0] > extendee:
+desc_test_options.proto:9:8
+desc_test_options.proto:9:38
+
+
+ > extension:
+desc_test_options.proto:10:2
+desc_test_options.proto:62:34
+
+
+ > extension[0]:
+desc_test_options.proto:10:2
+desc_test_options.proto:10:31
+
+
+ > extension[0] > label:
+desc_test_options.proto:10:2
+desc_test_options.proto:10:10
+
+
+ > extension[0] > type:
+desc_test_options.proto:10:11
+desc_test_options.proto:10:15
+
+
+ > extension[0] > name:
+desc_test_options.proto:10:16
+desc_test_options.proto:10:22
+
+
+ > extension[0] > number:
+desc_test_options.proto:10:25
+desc_test_options.proto:10:30
+
+
+ > extension[1] > extendee:
+desc_test_options.proto:13:8
+desc_test_options.proto:13:36
+
+
+ > extension[2] > extendee:
+desc_test_options.proto:13:8
+desc_test_options.proto:13:36
+
+
+ > extension[1]:
+desc_test_options.proto:14:2
+desc_test_options.proto:14:33
+
+
+ > extension[1] > label:
+desc_test_options.proto:14:2
+desc_test_options.proto:14:10
+
+
+ > extension[1] > type:
+desc_test_options.proto:14:11
+desc_test_options.proto:14:17
+
+
+ > extension[1] > name:
+desc_test_options.proto:14:18
+desc_test_options.proto:14:24
+
+
+ > extension[1] > number:
+desc_test_options.proto:14:27
+desc_test_options.proto:14:32
+
+
+ > extension[2]:
+desc_test_options.proto:15:2
+desc_test_options.proto:15:33
+
+
+ > extension[2] > label:
+desc_test_options.proto:15:2
+desc_test_options.proto:15:10
+
+
+ > extension[2] > type:
+desc_test_options.proto:15:11
+desc_test_options.proto:15:16
+
+
+ > extension[2] > name:
+desc_test_options.proto:15:17
+desc_test_options.proto:15:24
+
+
+ > extension[2] > number:
+desc_test_options.proto:15:27
+desc_test_options.proto:15:32
+
+
+ > extension[3] > extendee:
+desc_test_options.proto:18:8
+desc_test_options.proto:18:35
+
+
+ > extension[4] > extendee:
+desc_test_options.proto:18:8
+desc_test_options.proto:18:35
+
+
+ > extension[5] > extendee:
+desc_test_options.proto:18:8
+desc_test_options.proto:18:35
+
+
+ > extension[6] > extendee:
+desc_test_options.proto:18:8
+desc_test_options.proto:18:35
+
+
+ > extension[7] > extendee:
+desc_test_options.proto:18:8
+desc_test_options.proto:18:35
+
+
+ > extension[3]:
+desc_test_options.proto:19:2
+desc_test_options.proto:19:32
+
+
+ > extension[3] > label:
+desc_test_options.proto:19:2
+desc_test_options.proto:19:10
+
+
+ > extension[3] > type:
+desc_test_options.proto:19:11
+desc_test_options.proto:19:16
+
+
+ > extension[3] > name:
+desc_test_options.proto:19:17
+desc_test_options.proto:19:23
+
+
+ > extension[3] > number:
+desc_test_options.proto:19:26
+desc_test_options.proto:19:31
+
+
+ > extension[4]:
+desc_test_options.proto:20:2
+desc_test_options.proto:20:34
+
+
+ > extension[4] > label:
+desc_test_options.proto:20:2
+desc_test_options.proto:20:10
+
+
+ > extension[4] > type:
+desc_test_options.proto:20:11
+desc_test_options.proto:20:17
+
+
+ > extension[4] > name:
+desc_test_options.proto:20:18
+desc_test_options.proto:20:25
+
+
+ > extension[4] > number:
+desc_test_options.proto:20:28
+desc_test_options.proto:20:33
+
+
+ > extension[5]:
+desc_test_options.proto:21:2
+desc_test_options.proto:21:37
+
+
+ > extension[5] > label:
+desc_test_options.proto:21:2
+desc_test_options.proto:21:10
+
+
+ > extension[5] > type:
+desc_test_options.proto:21:11
+desc_test_options.proto:21:19
+
+
+ > extension[5] > name:
+desc_test_options.proto:21:20
+desc_test_options.proto:21:28
+
+
+ > extension[5] > number:
+desc_test_options.proto:21:31
+desc_test_options.proto:21:36
+
+
+ > extension[6]:
+desc_test_options.proto:22:2
+desc_test_options.proto:22:34
+
+
+ > extension[6] > label:
+desc_test_options.proto:22:2
+desc_test_options.proto:22:10
+
+
+ > extension[6] > type:
+desc_test_options.proto:22:11
+desc_test_options.proto:22:17
+
+
+ > extension[6] > name:
+desc_test_options.proto:22:18
+desc_test_options.proto:22:25
+
+
+ > extension[6] > number:
+desc_test_options.proto:22:28
+desc_test_options.proto:22:33
+
+
+ > extension[7]:
+desc_test_options.proto:23:2
+desc_test_options.proto:23:36
+
+
+ > extension[7] > label:
+desc_test_options.proto:23:2
+desc_test_options.proto:23:10
+
+
+ > extension[7] > type:
+desc_test_options.proto:23:11
+desc_test_options.proto:23:18
+
+
+ > extension[7] > name:
+desc_test_options.proto:23:19
+desc_test_options.proto:23:27
+
+
+ > extension[7] > number:
+desc_test_options.proto:23:30
+desc_test_options.proto:23:35
+
+
+ > extension[8] > extendee:
+desc_test_options.proto:26:8
+desc_test_options.proto:26:40
+
+
+ > extension[9] > extendee:
+desc_test_options.proto:26:8
+desc_test_options.proto:26:40
+
+
+ > extension[10] > extendee:
+desc_test_options.proto:26:8
+desc_test_options.proto:26:40
+
+
+ > extension[11] > extendee:
+desc_test_options.proto:26:8
+desc_test_options.proto:26:40
+
+
+ > extension[12] > extendee:
+desc_test_options.proto:26:8
+desc_test_options.proto:26:40
+
+
+ > extension[8]:
+desc_test_options.proto:27:2
+desc_test_options.proto:27:33
+
+
+ > extension[8] > label:
+desc_test_options.proto:27:2
+desc_test_options.proto:27:10
+
+
+ > extension[8] > type:
+desc_test_options.proto:27:11
+desc_test_options.proto:27:16
+
+
+ > extension[8] > name:
+desc_test_options.proto:27:17
+desc_test_options.proto:27:24
+
+
+ > extension[8] > number:
+desc_test_options.proto:27:27
+desc_test_options.proto:27:32
+
+
+ > extension[9]:
+desc_test_options.proto:28:2
+desc_test_options.proto:28:35
+
+
+ > extension[9] > label:
+desc_test_options.proto:28:2
+desc_test_options.proto:28:10
+
+
+ > extension[9] > type:
+desc_test_options.proto:28:11
+desc_test_options.proto:28:17
+
+
+ > extension[9] > name:
+desc_test_options.proto:28:18
+desc_test_options.proto:28:26
+
+
+ > extension[9] > number:
+desc_test_options.proto:28:29
+desc_test_options.proto:28:34
+
+
+ > extension[10]:
+desc_test_options.proto:29:2
+desc_test_options.proto:29:38
+
+
+ > extension[10] > label:
+desc_test_options.proto:29:2
+desc_test_options.proto:29:10
+
+
+ > extension[10] > type:
+desc_test_options.proto:29:11
+desc_test_options.proto:29:19
+
+
+ > extension[10] > name:
+desc_test_options.proto:29:20
+desc_test_options.proto:29:29
+
+
+ > extension[10] > number:
+desc_test_options.proto:29:32
+desc_test_options.proto:29:37
+
+
+ > extension[11]:
+desc_test_options.proto:30:2
+desc_test_options.proto:30:35
+
+
+ > extension[11] > label:
+desc_test_options.proto:30:2
+desc_test_options.proto:30:10
+
+
+ > extension[11] > type:
+desc_test_options.proto:30:11
+desc_test_options.proto:30:17
+
+
+ > extension[11] > name:
+desc_test_options.proto:30:18
+desc_test_options.proto:30:26
+
+
+ > extension[11] > number:
+desc_test_options.proto:30:29
+desc_test_options.proto:30:34
+
+
+ > extension[12]:
+desc_test_options.proto:31:2
+desc_test_options.proto:31:37
+
+
+ > extension[12] > label:
+desc_test_options.proto:31:2
+desc_test_options.proto:31:10
+
+
+ > extension[12] > type:
+desc_test_options.proto:31:11
+desc_test_options.proto:31:18
+
+
+ > extension[12] > name:
+desc_test_options.proto:31:19
+desc_test_options.proto:31:28
+
+
+ > extension[12] > number:
+desc_test_options.proto:31:31
+desc_test_options.proto:31:36
+
+
+ > extension[13] > extendee:
+desc_test_options.proto:34:8
+desc_test_options.proto:34:38
+
+
+ > extension[14] > extendee:
+desc_test_options.proto:34:8
+desc_test_options.proto:34:38
+
+
+ > extension[13]:
+desc_test_options.proto:35:2
+desc_test_options.proto:35:46
+
+
+ > extension[13] > label:
+desc_test_options.proto:35:2
+desc_test_options.proto:35:10
+
+
+ > extension[13] > type:
+desc_test_options.proto:35:11
+desc_test_options.proto:35:30
+
+
+ > extension[13] > name:
+desc_test_options.proto:35:31
+desc_test_options.proto:35:37
+
+
+ > extension[13] > number:
+desc_test_options.proto:35:40
+desc_test_options.proto:35:45
+
+
+ > extension[14]:
+desc_test_options.proto:36:2
+desc_test_options.proto:36:44
+
+
+ > extension[14] > label:
+desc_test_options.proto:36:2
+desc_test_options.proto:36:10
+
+
+ > extension[14] > type:
+desc_test_options.proto:36:11
+desc_test_options.proto:36:27
+
+
+ > extension[14] > name:
+desc_test_options.proto:36:28
+desc_test_options.proto:36:35
+
+
+ > extension[14] > number:
+desc_test_options.proto:36:38
+desc_test_options.proto:36:43
+
+
+ > extension[15] > extendee:
+desc_test_options.proto:39:8
+desc_test_options.proto:39:37
+
+
+ > extension[16] > extendee:
+desc_test_options.proto:39:8
+desc_test_options.proto:39:37
+
+
+ > extension[15]:
+desc_test_options.proto:40:2
+desc_test_options.proto:40:33
+
+
+ > extension[15] > label:
+desc_test_options.proto:40:2
+desc_test_options.proto:40:10
+
+
+ > extension[15] > type:
+desc_test_options.proto:40:11
+desc_test_options.proto:40:16
+
+
+ > extension[15] > name:
+desc_test_options.proto:40:17
+desc_test_options.proto:40:24
+
+
+ > extension[15] > number:
+desc_test_options.proto:40:27
+desc_test_options.proto:40:32
+
+
+ > extension[16]:
+desc_test_options.proto:41:2
+desc_test_options.proto:41:35
+
+
+ > extension[16] > label:
+desc_test_options.proto:41:2
+desc_test_options.proto:41:10
+
+
+ > extension[16] > type:
+desc_test_options.proto:41:11
+desc_test_options.proto:41:17
+
+
+ > extension[16] > name:
+desc_test_options.proto:41:18
+desc_test_options.proto:41:26
+
+
+ > extension[16] > number:
+desc_test_options.proto:41:29
+desc_test_options.proto:41:34
+
+
+ > message_type:
+desc_test_options.proto:45:1
+desc_test_options.proto:48:2
+
+
+ > message_type[0]:
+desc_test_options.proto:45:1
+desc_test_options.proto:48:2
+    Leading comments:
+ Test message used by custom options
+
+
+ > message_type[0] > name:
+desc_test_options.proto:45:9
+desc_test_options.proto:45:28
+
+
+ > message_type[0] > field:
+desc_test_options.proto:46:2
+desc_test_options.proto:47:27
+
+
+ > message_type[0] > field[0]:
+desc_test_options.proto:46:2
+desc_test_options.proto:46:25
+
+
+ > message_type[0] > field[0] > label:
+desc_test_options.proto:46:2
+desc_test_options.proto:46:10
+
+
+ > message_type[0] > field[0] > type:
+desc_test_options.proto:46:11
+desc_test_options.proto:46:17
+
+
+ > message_type[0] > field[0] > name:
+desc_test_options.proto:46:18
+desc_test_options.proto:46:20
+
+
+ > message_type[0] > field[0] > number:
+desc_test_options.proto:46:23
+desc_test_options.proto:46:24
+
+
+ > message_type[0] > field[1]:
+desc_test_options.proto:47:2
+desc_test_options.proto:47:27
+
+
+ > message_type[0] > field[1] > label:
+desc_test_options.proto:47:2
+desc_test_options.proto:47:10
+
+
+ > message_type[0] > field[1] > type:
+desc_test_options.proto:47:11
+desc_test_options.proto:47:17
+
+
+ > message_type[0] > field[1] > name:
+desc_test_options.proto:47:18
+desc_test_options.proto:47:22
+
+
+ > message_type[0] > field[1] > number:
+desc_test_options.proto:47:25
+desc_test_options.proto:47:26
+
+
+ > enum_type:
+desc_test_options.proto:51:1
+desc_test_options.proto:53:2
+
+
+ > enum_type[0]:
+desc_test_options.proto:51:1
+desc_test_options.proto:53:2
+    Leading comments:
+ Test enum used by custom options
+
+
+ > enum_type[0] > name:
+desc_test_options.proto:51:6
+desc_test_options.proto:51:22
+
+
+ > enum_type[0] > value:
+desc_test_options.proto:52:2
+desc_test_options.proto:52:12
+
+
+ > enum_type[0] > value[0]:
+desc_test_options.proto:52:2
+desc_test_options.proto:52:12
+
+
+ > enum_type[0] > value[0] > name:
+desc_test_options.proto:52:2
+desc_test_options.proto:52:7
+
+
+ > enum_type[0] > value[0] > number:
+desc_test_options.proto:52:10
+desc_test_options.proto:52:11
+
+
+ > extension[17] > extendee:
+desc_test_options.proto:55:8
+desc_test_options.proto:55:45
+
+
+ > extension[18] > extendee:
+desc_test_options.proto:55:8
+desc_test_options.proto:55:45
+
+
+ > extension[17]:
+desc_test_options.proto:56:2
+desc_test_options.proto:56:34
+
+
+ > extension[17] > label:
+desc_test_options.proto:56:2
+desc_test_options.proto:56:10
+
+
+ > extension[17] > type:
+desc_test_options.proto:56:11
+desc_test_options.proto:56:17
+
+
+ > extension[17] > name:
+desc_test_options.proto:56:18
+desc_test_options.proto:56:25
+
+
+ > extension[17] > number:
+desc_test_options.proto:56:28
+desc_test_options.proto:56:33
+
+
+ > extension[18]:
+desc_test_options.proto:57:2
+desc_test_options.proto:57:34
+
+
+ > extension[18] > label:
+desc_test_options.proto:57:2
+desc_test_options.proto:57:10
+
+
+ > extension[18] > type:
+desc_test_options.proto:57:11
+desc_test_options.proto:57:16
+
+
+ > extension[18] > name:
+desc_test_options.proto:57:17
+desc_test_options.proto:57:25
+
+
+ > extension[18] > number:
+desc_test_options.proto:57:28
+desc_test_options.proto:57:33
+
+
+ > extension[19] > extendee:
+desc_test_options.proto:60:8
+desc_test_options.proto:60:36
+
+
+ > extension[20] > extendee:
+desc_test_options.proto:60:8
+desc_test_options.proto:60:36
+
+
+ > extension[19]:
+desc_test_options.proto:61:2
+desc_test_options.proto:61:34
+
+
+ > extension[19] > label:
+desc_test_options.proto:61:2
+desc_test_options.proto:61:10
+
+
+ > extension[19] > type:
+desc_test_options.proto:61:11
+desc_test_options.proto:61:17
+
+
+ > extension[19] > name:
+desc_test_options.proto:61:18
+desc_test_options.proto:61:25
+
+
+ > extension[19] > number:
+desc_test_options.proto:61:28
+desc_test_options.proto:61:33
+
+
+ > extension[20]:
+desc_test_options.proto:62:2
+desc_test_options.proto:62:34
+
+
+ > extension[20] > label:
+desc_test_options.proto:62:2
+desc_test_options.proto:62:10
+
+
+ > extension[20] > type:
+desc_test_options.proto:62:11
+desc_test_options.proto:62:16
+
+
+ > extension[20] > name:
+desc_test_options.proto:62:17
+desc_test_options.proto:62:25
+
+
+ > extension[20] > number:
+desc_test_options.proto:62:28
+desc_test_options.proto:62:33