# Copyright 2020-present Open Networking Foundation
# Original copyright 2020-present ADTRAN, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import glob
import json
import argparse

try:
    from . import protobuf_parse as parser
except ImportError:
    import protobuf_parse as parser

__version__ = '1.0'

USAGE = """ProtoBuf -- Parser for Protocol Buffer files to create the input JSON file for the library

Usage: grpc_robot.protop [options] target target_version

The ProtoBuf parser can be used to parse ProtoBuf files (*.proto) into a JSON-formatted input file
for the grpc_robot library, which uses it for keyword documentation.

"""

EPILOG = """
Example
=======
# Executing the `grpc_robot.protop` module using Python.
$ grpc_robot.protop -i /home/user/Workspace/grpc/proto/dmi dmi 0.9.1
"""


class ProtoBufParser(object):
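    """Parses ProtoBuf (*.proto) files into the JSON input file used by the grpc_robot library."""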

    def __init__(self, target, target_version, input_dir, output_dir=None):
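        """
        :param target: name of the target API, e.g. 'dmi' or 'voltha'
        :param target_version: version string of the ProtoBuf files; dots are replaced by underscores
        :param input_dir: directory containing the *.proto files to parse
        :param output_dir: directory below which the resulting JSON file is written; if None, no file is written
        """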

        super().__init__()

        self.target = target
        self.target_version = target_version.replace('.', '_')
        self.input_dir = input_dir
        self.output_dir = output_dir

    @staticmethod
    def read_enum(enum, protobuf_dict, module):
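        """Convert a parsed enum into a data type dictionary and append it to protobuf_dict['data_types']."""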
        enum_dict = {'name': enum.name, 'type': 'enum', 'module': module, 'values': {ef.value: ef.name for ef in enum.body}}
        protobuf_dict['data_types'].append(enum_dict)

    def read_message(self, message, protobuf_dict, module):
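        """
        Convert a parsed message into a data type dictionary and append it to protobuf_dict['data_types'].

        Nested enums and messages are recorded as separate data types. Fields whose type is not one of the
        parser's known base types (no `_value_` attribute) are flagged with 'lookup': True so they can be
        resolved by name later.
        """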
        message_dict = {'name': message.name, 'type': 'message', 'module': module, 'fields': []}

        for f in message.body:

            if f is None:
                continue

            if isinstance(f, parser.Enum):
                self.read_enum(f, protobuf_dict, module)
                continue

            elif isinstance(f, parser.Message):
                self.read_message(f, protobuf_dict, module)
                continue

            field_dict = {'name': f.name, 'is_choice': isinstance(f, parser.OneOf)}

            if isinstance(f, parser.Field):
                field_dict['repeated'] = f.repeated

                try:
                    field_dict['type'] = f.type._value_
                    field_dict['lookup'] = False
                except AttributeError:
                    field_dict['type'] = f.type
                    field_dict['lookup'] = True

            elif isinstance(f, parser.OneOf):
                field_dict['cases'] = []
                for c in f.fields:
                    case_dict = {'name': c.name}
                    try:
                        case_dict['type'] = c.type._value_
                        case_dict['lookup'] = False
                    except AttributeError:
                        case_dict['type'] = c.type
                        case_dict['lookup'] = True
                    field_dict['cases'].append(case_dict)

            message_dict['fields'].append(field_dict)

        protobuf_dict['data_types'].append(message_dict)
    def parse_files(self):
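        """
        Parse all *.proto files in the input directory into one dictionary with the keys
        'modules', 'data_types' and 'services'.

        If an output directory is configured, the dictionary is additionally written to
        <output_dir>/<target>/<target>_<target_version>/<target>.json.

        :return: dictionary describing the modules, data types and services of the parsed ProtoBuf files
        """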

        protobuf_dict = {
            'modules': [],
            'data_types': [],
            'services': []
        }

        for file_name in glob.glob(os.path.join(self.input_dir, '*.proto')):
            print(file_name)

            module = os.path.splitext(os.path.basename(file_name))[0]
            module_dict = {'name': module, 'imports': []}

            # the protobuf parser cannot handle comments ("// ..."), so remove them from the file content first
            with open(file_name) as proto_file:
                file_content = re.sub(r'//.*', '', proto_file.read())
            parsed = parser.proto.parse(file_content)

            for p in parsed.statements:

                if isinstance(p, parser.Import):
                    module_dict['imports'].append(os.path.splitext(os.path.basename(p.identifier))[0])

                elif isinstance(p, parser.Enum):
                    self.read_enum(p, protobuf_dict, module)

                elif isinstance(p, parser.Message):
                    self.read_message(p, protobuf_dict, module)

                elif isinstance(p, parser.Service):
                    service_dict = {'name': p.name, 'module': module, 'rpcs': []}

                    for field in p.body:

                        if isinstance(field, parser.Enum):
                            self.read_enum(field, protobuf_dict, module)

                        elif isinstance(field, parser.Message):
                            self.read_message(field, protobuf_dict, module)

                        elif isinstance(field, parser.Rpc):
                            rpc_dict = {'name': field.name, 'request': {}, 'response': {}}

                            for attr in ['request', 'response']:
                                try:
                                    rpc_dict[attr]['is_stream'] = getattr(field, '%s_stream' % attr)

                                    try:
                                        rpc_dict[attr]['type'] = getattr(field, '%s_message_type' % attr)._value_
                                        rpc_dict[attr]['lookup'] = False
                                    except AttributeError:
                                        rpc_dict[attr]['type'] = getattr(field, '%s_message_type' % attr)
                                        rpc_dict[attr]['lookup'] = not rpc_dict[attr]['type'].lower().startswith('google.protobuf.')

                                except AttributeError:
                                    rpc_dict[attr] = None

                            service_dict['rpcs'].append(rpc_dict)

                    protobuf_dict['services'].append(service_dict)

            protobuf_dict['modules'].append(module_dict)

        if self.output_dir is not None:
            json_file_name = os.path.join(self.output_dir, self.target, '%s_%s' % (self.target, self.target_version), '%s.json' % self.target)
            with open(json_file_name, 'w') as json_file:
                json.dump(protobuf_dict, json_file)

        return protobuf_dict


base_dir = os.path.dirname(os.path.realpath(__file__))
output_base_dir = os.path.join(os.path.split(base_dir)[:-1][0], 'services')


def main():
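    """Command line entry point: parse the arguments and run the ProtoBufParser on the given ProtoBuf files."""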
    # create the command line parser
    arg_parse = argparse.ArgumentParser(description=USAGE, epilog=EPILOG, formatter_class=argparse.RawTextHelpFormatter)

    # add parser options
    arg_parse.add_argument('target', choices=['dmi', 'voltha'],
                           help="Target for which the ProtoBuf files shall be converted into the JSON file.")
    arg_parse.add_argument('target_version', help="Version number of the ProtoBuf files.")

    arg_parse.add_argument('-i', '--inputdir', default=os.getcwd(), help="Path to the location of the ProtoBuf files.")
    arg_parse.add_argument('-o', '--outputdir', default=os.getcwd(), help="Path to the location where the JSON file shall be stored.")

    arg_parse.add_argument('-v', '--version', action='version', version=__version__)
    arg_parse.set_defaults(feature=False)

    # parse the command line
    args = arg_parse.parse_args()

    ProtoBufParser(args.target, args.target_version, args.inputdir or os.getcwd(), args.outputdir or output_base_dir).parse_files()


if __name__ == '__main__':
    main()