blob: 71a698b3e4e3ef1566877df5aca08ff757f63025 [file] [log] [blame]
Zsolt Haraszti46c72002016-10-10 09:55:30 -07001#!/usr/bin/env python
Matteo Scandolo11d074c2017-08-29 13:29:37 -07002
Zsolt Harasztiaccad4a2017-01-03 21:56:48 -08003# Copyright 2017 the original author or authors.
Zsolt Haraszti46c72002016-10-10 09:55:30 -07004#
5# Licensed under the Apache License, Version 2.0 (the "License");
6# you may not use this file except in compliance with the License.
7# You may obtain a copy of the License at
8#
9# http://www.apache.org/licenses/LICENSE-2.0
10#
11# Unless required by applicable law or agreed to in writing, software
12# distributed under the License is distributed on an "AS IS" BASIS,
13# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14# See the License for the specific language governing permissions and
15# limitations under the License.
16#
17
18"""
19Load a protobuf description file or protoc CodeGeneratorRequest an make
20sense of it
21"""
22
23import os
24import inspect
25from collections import OrderedDict
26
27import sys
28
29from google.protobuf.compiler.plugin_pb2 import CodeGeneratorRequest
30from google.protobuf.descriptor import FieldDescriptor, Descriptor
31from google.protobuf.descriptor_pb2 import FileDescriptorProto, MethodOptions
32from google.protobuf.message import Message, DecodeError
33from simplejson import dumps
34
35from google.protobuf import descriptor_pb2
36
37
Zack Williams7eb36d02019-03-19 07:16:12 -070038class InvalidDescriptorError(Exception):
39 pass
Zsolt Haraszti46c72002016-10-10 09:55:30 -070040
41
class DescriptorParser(object):
    """Parse serialized protobuf descriptors (FileDescriptorSet or protoc
    CodeGeneratorRequest blobs) into plain dict/list structures, kept in a
    catalog keyed by protobuf package name."""

    def __init__(self, ignore_empty_source_code_info=True):
        # NOTE(review): this flag is stored but never consulted anywhere in
        # this class — confirm whether it is still needed.
        self.ignore_empty_source_code_info = ignore_empty_source_code_info
        self.catalog = {}  # package name -> parsed file-descriptor dict
        self.meta, blob = self.load_root_descriptor()
        self.load_descriptor(blob)

    def load_root_descriptor(self):
        """Load descriptor.desc to make things more data driven"""
        # open in binary mode: the file holds a serialized protobuf blob;
        # text mode would corrupt it on platforms with newline translation
        with open('descriptor.desc', 'rb') as f:
            blob = f.read()
        proto = descriptor_pb2.FileDescriptorSet()
        proto.ParseFromString(blob)
        assert len(proto.file) == 1  # expect exactly one file descriptor
        fdp = proto.file[0]
        return fdp, blob

    def get_catalog(self):
        """Return the catalog dict (package name -> parsed descriptor)."""
        return self.catalog

    def load_descriptor(self, descriptor_blob,
                        fold_comments=True,
                        type_tag_name='_type'):
        """Decode a serialized FileDescriptorSet, or failing that a protoc
        CodeGeneratorRequest, and register every contained proto file in
        the catalog under its package name.

        :param descriptor_blob: serialized protobuf bytes
        :param fold_comments: when True, attach source comments to the
            nodes they describe (as '_description') and drop the bulky
            source_code_info section
        :param type_tag_name: key under which each message's full protobuf
            type name is recorded in the generated dicts
        """
        # decode file descriptor set or, if that is not possible,
        # try plugin request
        try:
            message = descriptor_pb2.FileDescriptorSet()
            message.ParseFromString(descriptor_blob)
        except DecodeError:
            message = CodeGeneratorRequest()
            message.ParseFromString(descriptor_blob)

        d = self.parse(message, type_tag_name=type_tag_name)
        # FileDescriptorSet carries 'file'; CodeGeneratorRequest 'proto_file'
        for _file in d.get('file', None) or d['proto_file']:
            if fold_comments:
                self.fold_comments_in(_file)
            self.catalog[_file['package']] = _file

    def parse_message(self, m, type_tag_name=None):
        """Recursively convert a protobuf Message into an OrderedDict."""
        assert isinstance(m, Message)
        d = OrderedDict()
        for fd, v in m.ListFields():
            assert isinstance(fd, FieldDescriptor)
            if fd.label in (FieldDescriptor.LABEL_OPTIONAL,
                            FieldDescriptor.LABEL_REQUIRED):
                d[fd.name] = self.parse(v, type_tag_name)
            elif fd.label == FieldDescriptor.LABEL_REPEATED:
                d[fd.name] = [self.parse(x, type_tag_name) for x in v]
            else:
                raise InvalidDescriptorError(
                    'unexpected label %r for field %s' % (fd.label, fd.name))

        if type_tag_name is not None:
            # remember the protobuf type of this node
            d[type_tag_name] = m.DESCRIPTOR.full_name

        return d

    # pass-through converters for scalar field values; a value whose type
    # is not listed here makes parse() raise KeyError
    parser_table = {
        unicode: lambda x: x,
        str: lambda x: x,    # py2 bytes fields
        int: lambda x: x,
        long: lambda x: x,   # py2 long, e.g. 64-bit integer fields
        bool: lambda x: x,
        float: lambda x: x,  # float/double fields
    }

    def parse(self, o, type_tag_name=None):
        """Convert a Message or scalar field value to plain Python data."""
        if isinstance(o, Message):
            return self.parse_message(o, type_tag_name)
        else:
            return self.parser_table[type(o)](o)

    def fold_comments_in(self, descriptor):
        """Attach comments from source_code_info to the nodes they address
        (stored as '_description'), then strip source_code_info."""
        assert isinstance(descriptor, dict)

        locations = descriptor.get('source_code_info', {}).get('location', [])
        for location in locations:
            path = location.get('path', [])
            comments = ''.join([
                location.get('leading_comments', '').strip(' '),
                location.get('trailing_comments', '').strip(' '),
                ''.join(block.strip(' ') for block
                        in location.get('leading_detached_comments', ''))
            ]).strip()

            # ignore locations with no comments
            if not comments:
                continue

            # we ignore path with odd number of entries, since these do
            # not address our schema nodes, but rather the meta schema
            if (len(path) % 2 == 0):
                node = self.find_node_by_path(
                    path, self.meta.DESCRIPTOR, descriptor)
                assert isinstance(node, dict)
                node['_description'] = comments

        # remove source_code_info; pop() tolerates descriptors that never
        # had the key (plain del raised KeyError in that case)
        descriptor.pop('source_code_info', None)

    def find_node_by_path(self, path, meta, o):
        """Descend into parsed dict o following (field number, list index)
        pairs from path, resolving field numbers via meta (a protobuf
        Descriptor). NOTE: consumes (mutates) the path list passed in.
        """
        # stop recursion when path is empty
        if not path:
            return o

        # sanity check: entries come in (field number, index) pairs
        assert len(path) >= 2
        assert isinstance(meta, Descriptor)
        assert isinstance(o, dict)

        # find field name, then actual field
        field_number = path.pop(0)
        field_def = meta.fields_by_number[field_number]
        field = o[field_def.name]

        # field must be a list, extract entry with given index
        assert isinstance(field, list)  # expected to be a list field
        index = path.pop(0)
        child_o = field[index]

        child_meta = field_def.message_type
        return self.find_node_by_path(path, child_meta, child_o)
168
169
170if __name__ == '__main__':
171
172 # try loading voltha descriptor and turn it into JSON data as a preparation
173 # for generating JSON Schema / swagger file (to be done later)
174 if len(sys.argv) >= 2:
175 desc_file = sys.argv[1]
176 else:
177 desc_dir = os.path.dirname(inspect.getfile(voltha_pb2))
178 desc_file = os.path.join(desc_dir, 'voltha.desc')
179
180 from voltha.protos import voltha_pb2
181 with open(desc_file, 'rb') as f:
182 descriptor_blob = f.read()
183
184 parser = DescriptorParser()
185 parser.save_file_desc = '/tmp/grpc_introspection.out'
186
187 parser.load_descriptor(descriptor_blob)
188 print dumps(parser.get_catalog(), indent=4)
189 sys.exit(0)
190
191 # try to see if we can decode binary data into JSON automatically
192 from random import seed, randint
193 seed(0)
194
195 def make_mc(name, n_children=0):
196 mc = voltha_pb2.MoreComplex(
197 name=name,
198 foo_counter=randint(0, 10000),
199 health=voltha_pb2.HealthStatus(
200 state=voltha_pb2.HealthStatus.OVERLOADED
201 ),
202 address=voltha_pb2.Address(
203 street='1383 N McDowell Blvd',
204 city='Petaluma',
205 zip=94954,
206 state='CA'
207 ),
208 children=[make_mc('child%d' % (i + 1)) for i in xrange(n_children)]
209 )
210 return mc
211
212 mc = make_mc('root', 3)
213 blob = mc.SerializeToString()
214 print len(blob), 'bytes'
215 mc2 = voltha_pb2.MoreComplex()
216 mc2.ParseFromString(blob)
217 assert mc == mc2
218
219 print dumps(parser.parse(mc, type_tag_name='_type'), indent=4)