#!/usr/bin/env python
#
# Copyright 2016 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

| 18 | """protoc plugin to convert a protobuf schema to a yang schema |
| 19 | |
| 20 | - basic support for message, fields. enumeration, service, method |
| 21 | |
| 22 | - yang semantic rules needs to be implemented |
| 23 | |
| 24 | - to run this plugin : |
| 25 | |
| 26 | $ python -m grpc.tools.protoc -I. |
| 27 | --plugin=protoc-gen-custom=./proto2yang.py --custom_out=. <proto file>.proto |
| 28 | |
| 29 | - the above will produce a ietf-<proto file>.yang file formatted for yang |
| 30 | |
| 31 | - two examples of proto that can be used in the same directory are |
| 32 | yang.proto and addressbook.proto |
| 33 | |
| 34 | """ |
| 35 | |
import sys

from jinja2 import Template
from google.protobuf.compiler import plugin_pb2 as plugin
from google.protobuf.descriptor_pb2 import DescriptorProto, \
    FieldDescriptorProto
from descriptor_parser import DescriptorParser
import copy
import yang_options_pb2

from google.protobuf.descriptor import FieldDescriptor

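# Jinja2 template that renders one YANG module per proto file.  It is fed the
# 'module' dict assembled in generate_code() below and emits, roughly:
#
#   module ietf-<name> {
#       namespace "urn:opencord:params:xml:ns:voltha:ietf-<name>";
#       ...typedefs, groupings/containers for messages, one rpc per method...
#   }
#
# The set_module_prefix macro qualifies a type with its defining module's
# prefix when that type comes from an imported module.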
template_yang = Template("""
module ietf-{{ module.name }} {

    {% macro set_module_prefix(type) %}
    {% for t in module.data_types %}
    {% if t.type == type %}
    {% if t.module != module.name %} {{ t.module }}:{{ type }};
    {% else %} {{ type }};
    {% endif %}
    {% set found=True %}
    {% endif %}
    {% if loop.last %}
    {% if not found %} {{ type }}; {% endif %}
    {% endif %}
    {% endfor %}
    {% endmacro %}

    namespace "urn:opencord:params:xml:ns:voltha:ietf-{{ module.name }}";
    prefix {{ module.name }};

    {% for imp in module.imports %}
    import ietf-{{ imp.name }} { prefix {{ imp.name }} ; }
    {% endfor %}

    organization "CORD";
    contact
        " Any name";

    description
        "{{ module.description }}";

    revision "2016-11-15" {
        description "Initial revision.";
        reference "reference";
    }

    {% for enum in module.enums %}
    typedef {{ enum.name }} {
        type enumeration {
        {% for v in enum.value %}
            enum {{ v.name }} {
                description "{{ v.description }}";
            }
        {% endfor %}
        }
        description
            "{{ enum.description }}";
    }
    {% endfor %}

    {% for message in module.messages recursive %}
    {% if message.name in module.referred_messages %}
    grouping {{ message.name }} {
    {% else %}
    container {{ message.name }} {
    {% endif %}
        description
            "{{ message.description }}";
        {% for field in message.fields %}
        {% if field.type_ref %}
        {% for dict_item in module.referred_messages_with_keys %}
        {% if dict_item.name == field.type %}
        {% if not field.repeated %}
        container {{ field.name }} {
        {% else %}
        list {{ field.name }} {
            key "{{ dict_item.key }}";
            {% if not field.repeated %}
            max-elements 1;
            {% endif %}
        {% endif %}
            uses {{ set_module_prefix(field.type) }}
            description
                "{{ field.description }}";
        }
        {% endif %}
        {% endfor %}
        {% elif field.repeated %}
        list {{ field.name }} {
            key "{{ field.name }}";
            leaf {{ field.name }} {
                {% if field.type == "decimal64" %}
                type {{ field.type }} {
                    fraction-digits 5;
                }
                {% else %}
                type {{ set_module_prefix(field.type) }}
                {% endif %}
                description
                    "{{ field.description }}";
            }
            description
                "{{ field.description }}";
        }
        {% else %}
        leaf {{ field.name }} {
            {% if field.type == "decimal64" %}
            type {{ field.type }} {
                fraction-digits 5;
            }
            {% else %}
            type {{ set_module_prefix(field.type) }}
            {% endif %}
            description
                "{{ field.description }}";
        }
        {% endif %}

        {% endfor %}
        {% for enum_type in message.enums %}
        typedef {{ enum_type.name }} {
            type enumeration {
            {% for v in enum_type.value %}
                enum {{ v.name }} {
                    description "{{ v.description }}";
                }
            {% endfor %}
            }
            description
                "{{ enum_type.description }}";
        }

        {% endfor %}
    {% if message.messages %}
    {{ loop (message.messages)|indent(4, false) }}
    {% endif %}
    }

    {% endfor %}
    {% for service in module.services %}
    {% if service.description %}
    /* {{ service.description }} */
    {% endif %}
    {% for method in service.methods %}
    rpc {{ service.service }}-{{ method.method }} {
        description
            "{{ method.description }}";
        {% if method.input %}
        input {
            {% if method.input_ref %}
            uses {{ set_module_prefix(method.input) }}
            {% else %}
            leaf {{ method.input }} {
                type {{ set_module_prefix(method.input) }}
            }
            {% endif %}
        }
        {% endif %}
        {% if method.output %}
        output {
            {% if method.output_ref %}
            uses {{ set_module_prefix(method.output) }}
            {% else %}
            leaf {{ method.output }} {
                type {{ set_module_prefix(method.output) }}
            }
            {% endif %}
        }
        {% endif %}
    }

    {% endfor %}

    {% endfor %}
}
""", trim_blocks=True, lstrip_blocks=True)

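# Scan the fields of a message for the custom voltha.yang_inline_node option
# and record, per annotated field, the option name plus the proto name/type
# of the message that should be inlined in its place.  Only non-repeated
# message-typed fields are considered.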
def traverse_field_options(fields, prefix):
    field_options = []
    for field in fields:
        assert isinstance(field, FieldDescriptorProto)
        full_name = prefix + '-' + field.name
        option = None
        if field.type == FieldDescriptor.TYPE_MESSAGE and field.label != \
                FieldDescriptor.LABEL_REPEATED:
            if field.options:
                for fd, val in field.options.ListFields():
                    if fd.full_name == 'voltha.yang_inline_node':
                        field_options.append(
                            {'name': full_name,
                             'option': fd.full_name,
                             'proto_name': val.id,
                             'proto_type': val.type
                             }
                        )
    return field_options

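# Walk a list of DescriptorProto messages, recursing into nested types, and
# collect any voltha.yang_child_rule / voltha.yang_message_rule options set
# on the messages together with the field-level options gathered above.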
def traverse_message_options(message_types, prefix):
    message_options = []
    for message_type in message_types:
        assert isinstance(message_type, DescriptorProto)
        full_name = prefix + '-' + message_type.name
        option_rules = []

        options = message_type.options
        if options:
            for fd, val in options.ListFields():
                if fd.full_name in ['voltha.yang_child_rule',
                                    'voltha.yang_message_rule']:
                    option_rules.append({
                        'name': fd.full_name,
                        'value': val
                    })

        # parse fields for options
        field_options = traverse_field_options(message_type.field,
                                               full_name)

        # parse nested messages
        nested_messages_options = []
        nested = message_type.nested_type
        if nested:
            nested_messages_options = traverse_message_options(nested,
                                                               full_name)

        if option_rules or nested_messages_options or field_options:
            message_options.append(
                {
                    'name': full_name,
                    'options': option_rules,
                    'field_options': field_options,
                    'nested_options': nested_messages_options,
                }
            )
    return message_options

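# Look up the message-level option rules recorded for 'name', searching
# nested message options recursively.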
def get_message_options(name, options):
    result = None
    for opt in options:
        if opt['name'] == name:
            return opt['options']
        if opt['nested_options']:
            result = get_message_options(name, opt['nested_options'])
        if result:
            return result

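# Look up the field-level option recorded for the fully qualified field
# 'name', searching nested message options recursively.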
def get_field_options(name, options):
    result = None
    for opt in options:
        if opt['field_options']:
            for field_opt in opt['field_options']:
                if field_opt['name'] == name:
                    # return the match right away so it cannot be overwritten
                    # by the nested lookup below
                    return field_opt
        if opt['nested_options']:
            result = get_field_options(name, opt['nested_options'])
        if result:
            return result

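# Entry point for option collection: walk all top-level messages of a proto
# file and return the yang-specific options found (None when the file has no
# message types).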
def traverse_options(proto_file):
    package = proto_file.name
    prefix = package.replace('.proto', '')
    if proto_file.message_type:
        message_options = traverse_message_options(proto_file.message_type,
                                                   prefix)
        return message_options

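# The traverse_* functions below operate on the dicts produced by
# DescriptorParser rather than on raw protobuf descriptors.
# Convert each DescriptorProto dict into the intermediate 'message' dict used
# by the template, recursing into nested messages and collecting the names of
# referenced (non base-type) messages along the way.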
def traverse_messages(message_types, prefix, referenced_messages):
    messages = []
    for message_type in message_types:
        assert message_type['_type'] == 'google.protobuf.DescriptorProto'

        full_name = prefix + '-' + message_type['name']
        name = message_type['name']

        # parse the fields
        fields = traverse_fields(message_type.get('field', []), full_name,
                                 referenced_messages)

        # parse the enums
        enums = traverse_enums(message_type.get('enum_type', []), full_name)

        # parse nested messages
        nested = message_type.get('nested_type', [])
        nested_messages = traverse_messages(nested, full_name,
                                            referenced_messages)

        messages.append(
            {
                'full_name': full_name,
                'name': name,
                'fields': fields,
                'enums': enums,
                'messages': nested_messages,
                'description': remove_unsupported_characters(
                    message_type.get('_description', '')),
            }
        )
    return messages

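# Convert each FieldDescriptorProto dict into the intermediate 'field' dict;
# non base-type and enumeration fields are also recorded in
# referenced_messages so they can later be rendered as groupings/typedefs.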
def traverse_fields(fields_desc, prefix, referenced_messages):
    fields = []
    for field in fields_desc:
        assert field['_type'] == 'google.protobuf.FieldDescriptorProto'
        yang_base_type = is_base_type(field['type'])
        _type = get_yang_type(field)
        if not yang_base_type:
            referenced_messages.append(_type)
        # add to referred messages also if it is an enumeration type
        if is_enumeration(field['type']):
            referenced_messages.append(_type)

        fields.append(
            {
                'full_name': prefix + '-' + field.get('name', ''),
                'name': field.get('name', ''),
                'label': field.get('label', ''),
                'repeated': field['label'] == FieldDescriptor.LABEL_REPEATED,
                'number': field.get('number', ''),
                'options': field.get('options', ''),
                'type_name': field.get('type_name', ''),
                'type': _type,
                'type_ref': not yang_base_type,
                'description': remove_unsupported_characters(field.get(
                    '_description', ''))
            }
        )
    return fields

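# Convert each EnumDescriptorProto dict into the intermediate 'enum' dict
# rendered as a yang typedef/enumeration.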
def traverse_enums(enums_desc, prefix):
    enums = []
    for enum in enums_desc:
        assert enum['_type'] == 'google.protobuf.EnumDescriptorProto'
        full_name = prefix + '-' + enum.get('name', '')
        name = enum.get('name', '')
        enums.append(
            {
                'full_name': full_name,
                'name': name,
                'value': enum.get('value', ''),
                'description': remove_unsupported_characters(enum.get(
                    '_description', ''))
            }
        )
    return enums

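# Convert each service and its methods into the intermediate 'service' dict;
# message-typed inputs and outputs are reduced to their last name component
# and added to referenced_messages so the rpc can reference them with 'uses'.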
def traverse_services(service_desc, referenced_messages):
    services = []
    for service in service_desc:
        methods = []
        for method in service.get('method', []):
            assert method['_type'] == 'google.protobuf.MethodDescriptorProto'

            input_name = method.get('input_type')
            input_ref = False
            if not is_base_type(input_name):
                input_name = remove_first_character_if_match(input_name, '.')
                # input_name = input_name.replace(".", "-")
                input_name = input_name.split('.')[-1]
                referenced_messages.append(input_name)
                input_ref = True

            output_name = method.get('output_type')
            output_ref = False
            if not is_base_type(output_name):
                output_name = remove_first_character_if_match(output_name, '.')
                # output_name = output_name.replace(".", "-")
                output_name = output_name.split('.')[-1]
                referenced_messages.append(output_name)
                output_ref = True

            methods.append(
                {
                    'method': method.get('name', ''),
                    'input': input_name,
                    'input_ref': input_ref,
                    'output': output_name,
                    'output_ref': output_ref,
                    'description': remove_unsupported_characters(method.get(
                        '_description', '')),
                    'server_streaming': method.get('server_streaming',
                                                   False) == True
                }
            )
        services.append(
            {
                'service': service.get('name', ''),
                'methods': methods,
                'description': remove_unsupported_characters(service.get(
                    '_description', '')),
            }
        )
    return services

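# Remove 'ending' from the end of 'thestring' if it is present.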
def rchop(thestring, ending):
    if thestring.endswith(ending):
        return thestring[:-len(ending)]
    return thestring

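# Build the complete intermediate representation of one proto file: its
# messages, enums, services, locally defined type names and the set of
# messages referenced from fields and rpc methods.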
def traverse_desc(descriptor):
    referenced_messages = []
    name = rchop(descriptor.get('name', ''), '.proto')
    package = descriptor.get('package', '')
    description = descriptor.get('_description', '')
    messages = traverse_messages(descriptor.get('message_type', []),
                                 name, referenced_messages)
    enums = traverse_enums(descriptor.get('enum_type', []), name)
    services = traverse_services(descriptor.get('service', []),
                                 referenced_messages)

    # Get a list of type definitions (messages, enums) defined in this
    # descriptor
    defined_types = [m['name'].split('/')[-1] for m in messages] + \
                    [e['name'].split('/')[-1] for e in enums]

    data = {
        'name': name.split('/')[-1],
        'package': package,
        'description': description,
        'messages': messages,
        'enums': enums,
        'services': services,
        'defined_types': defined_types,
        'referenced_messages': list(set(referenced_messages)),
    }
    return data

# For now, annotations are added to first level messages only.
# Therefore, at this time no need to tackle nested messages.
def move_message_to_parent_level(message, messages, enums):
    new_message = []
    new_enum = copy.deepcopy(enums)
    for msg in messages:
        if msg['full_name'] == message['full_name']:
            # Move all sub messages and enums to top level
            if msg['messages']:
                new_message = new_message + copy.deepcopy(msg['messages'])
            if msg['enums']:
                new_enum = new_enum + copy.deepcopy(msg['enums'])

            # if the message has some fields then enclose them in a container
            if msg['fields']:
                new_message.append(
                    {
                        'full_name': msg['full_name'],
                        'name': msg['name'],
                        'fields': msg['fields'],
                        'description': msg['description'],
                        'messages': [],
                        'enums': []
                    }
                )
        else:
            new_message.append(msg)

    return new_message, new_enum

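# Apply the message-level yang annotations:
#   - voltha.yang_child_rule: move a message's children up to the parent
#     (module) level
#   - voltha.yang_message_rule: clone the message under a '<name>_grouping'
#     name so it can exist both as a grouping and as a container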
def update_messages_per_annotations_rule(options, messages, enums):
    new_messages = messages
    new_enums = enums
    # Used when a message needs to exist both as a type and a container
    duplicate_messages = []
    for message in messages:
        opts = get_message_options(message['full_name'], options)
        if opts:
            for opt in opts:
                if opt['name'] == 'voltha.yang_child_rule':
                    new_messages, new_enums = move_message_to_parent_level(
                        message, new_messages, new_enums)
                elif opt['name'] == 'voltha.yang_message_rule':
                    # create a duplicate message
                    # TODO: update references to point to the
                    duplicate_messages.append(message['name'])
                    clone = copy.deepcopy(message)
                    clone['full_name'] = ''.join(
                        [clone['full_name'], '_', 'grouping'])
                    clone['name'] = ''.join([clone['name'], '_', 'grouping'])
                    new_messages = new_messages + [clone]

    return new_messages, new_enums, duplicate_messages

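# Replace the annotated field with the full content (fields, enums, nested
# messages) of the message type it refers to; only top-level messages are
# considered.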
def inline_field(message, field, option, messages):
    new_message = copy.deepcopy(message)
    new_message['fields'] = []
    for f in message['fields']:
        if f['full_name'] == field['full_name']:
            # look for the message this field referred to.
            # Addresses only top-level messages
            for m in messages:
                # 'proto_type' is the name of the message type this field
                # refers to
                if m['full_name'] == option['proto_type']:
                    # Copy all content of m into the field
                    new_message['fields'] = new_message['fields'] + \
                                            copy.deepcopy(m['fields'])
                    new_message['enums'] = new_message['enums'] + \
                                           copy.deepcopy(m['enums'])
                    new_message['messages'] = new_message['messages'] + \
                                              copy.deepcopy(m['messages'])
        else:
            new_message['fields'].append(f)

    return new_message

# Address only annotations on top-level messages, i.e. no nested messages
def update_fields_per_annotations_rule(options, messages):
    new_messages = []
    for message in messages:
        new_message = None
        for field in message['fields']:
            opt = get_field_options(field['full_name'], options)
            if opt:
                if opt['option'] == 'voltha.yang_inline_node':
                    new_message = inline_field(message, field, opt, messages)

        if new_message:
            new_messages.append(new_message)
        else:
            new_messages.append(message)

    return new_messages

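# Key derivation helpers: yang lists need a key, so each message is assigned
# one (the first base-type field found, searching referenced and nested
# messages when necessary).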
def set_messages_keys(messages):
    for message in messages:
        message['key'] = _get_message_key(message, messages)
        if message['messages']:
            set_messages_keys(message['messages'])


def _get_message_key(message, messages):
    # assume key is first yang base type field
    for field in message['fields']:
        if not field['type_ref']:
            return field['name']
        else:
            # if the field refers to a message then look for the key in that
            # message
            ref_message = _get_message(field['type'], messages)
            if ref_message:
                return _get_message_key(ref_message, messages)

    # no key yet - search nested messages
    for m in message['messages']:
        key = _get_message_key(m, messages)
        if key is not None:
            return key
    return None


def _get_message(name, messages):
    for m in messages:
        if m['name'] == name:
            return m
    return None

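# Return the key previously computed by set_messages_keys() for the named
# message, descending into nested messages.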
def get_message_key(message_name, messages):
    for message in messages:
        if message_name == message['name']:
            return message['key']
        if message['messages']:
            return get_message_key(message_name, message['messages'])
    return None

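# Derive the module's import list: any referenced type that is defined in a
# different module results in an import of that module.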
def update_module_imports(module):
    used_imports = set()
    for ref_msg in module['referenced_messages']:
        for type_dict in module['data_types']:
            if ref_msg == type_dict['type']:
                if module['name'] != type_dict['module']:
                    used_imports.add(type_dict['module'])
                break
    module['imports'] = [{'name': i} for i in used_imports]

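# Rewrite the consolidated referred-message list so that messages duplicated
# as groupings are referred to by their '<name>_grouping' alias.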
def update_referred_messages(all_referred_messages, all_duplicate_messages):
    new_referred_messages = []
    for ref in all_referred_messages:
        if ref in all_duplicate_messages:
            new_referred_messages.append(''.join([ref, '_grouping']))
        else:
            new_referred_messages.append(ref)

    return new_referred_messages

def update_message_references_based_on_duplicates(duplicates, messages):
    # Duplicates has a list of messages that exist both as a grouping and as
    # a container.  All references to the container name by existing fields
    # should be changed to the grouping name instead
    for m in messages:
        for f in m['fields']:
            if f['type'] in duplicates:
                f['type'] = ''.join([f['type'], '_grouping'])
        if m['messages']:
            update_message_references_based_on_duplicates(duplicates,
                                                          m['messages'])

def update_service_references_based_on_duplicates(duplicates, services):
    # Duplicates has a list of messages that exist both as a grouping and as
    # a container.  All references to the container name by existing rpc
    # methods should be changed to the grouping name instead
    for s in services:
        for m in s['methods']:
            if m['input_ref'] and m['input'] in duplicates:
                m['input'] = ''.join([m['input'], '_grouping'])
            if m['output_ref'] and m['output'] in duplicates:
                m['output'] = ''.join([m['output'], '_grouping'])

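# Main entry point of the plugin: parse every proto file in the request,
# apply the yang annotation rules, consolidate types and references across
# files and render one ietf-<name>.yang file per proto file into the
# response.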
def generate_code(request, response):
    assert isinstance(request, plugin.CodeGeneratorRequest)

    parser = DescriptorParser()

    # First process the proto file with the imports
    all_defined_types = []
    all_proto_data = []
    all_referred_messages = []
    all_messages = []
    all_duplicate_messages = []
    for proto_file in request.proto_file:
        options = traverse_options(proto_file)
        # print options

        native_data = parser.parse_file_descriptor(proto_file,
                                                   type_tag_name='_type',
                                                   fold_comments=True)

        # Consolidate the defined types across imports
        yang_data = traverse_desc(native_data)

        duplicates = []
        if options:
            new_messages, new_enums, duplicates = \
                update_messages_per_annotations_rule(
                    options, yang_data['messages'], yang_data['enums'])

            new_messages = update_fields_per_annotations_rule(options,
                                                              new_messages)

            # TODO: Need to do the change across all schema files.  Not
            # needed as annotations are single file based for now
            if duplicates:
                update_message_references_based_on_duplicates(duplicates,
                                                              new_messages)
                update_service_references_based_on_duplicates(
                    duplicates, yang_data['services'])

            yang_data['messages'] = new_messages
            yang_data['enums'] = new_enums

        for type in yang_data['defined_types']:
            all_defined_types.append(
                {
                    'type': type,
                    'module': yang_data['name']
                }
            )

        all_proto_data.append(
            {
                'file_name': '{}-{}'.format('ietf', proto_file.name.split(
                    '/')[-1].replace('.proto', '.yang')),
                'module': yang_data
            }
        )

        # Consolidate all duplicate messages
        all_duplicate_messages = all_duplicate_messages + duplicates

        # Consolidate referred messages across imports
        all_referred_messages = all_referred_messages + yang_data[
            'referenced_messages']

        # consolidate all messages
        all_messages = all_messages + yang_data['messages']

    # Update the referred_messages
    all_referred_messages = update_referred_messages(all_referred_messages,
                                                     all_duplicate_messages)

    # Set the message keys - required for List definitions (repeated label)
    set_messages_keys(all_messages)
    unique_referred_messages_with_keys = []
    for m in all_messages:
        unique_referred_messages_with_keys.append(
            {
                'name': m['name'],
                'key': m['key']
            }
        )

    # print_referred_msg(unique_referred_messages_with_keys)
    # Create the files
    for proto_data in all_proto_data:
        f = response.file.add()
        f.name = proto_data['file_name']
        proto_data['module']['data_types'] = all_defined_types
        proto_data['module']['referred_messages'] = all_referred_messages
        proto_data['module']['referred_messages_with_keys'] = \
            unique_referred_messages_with_keys
        proto_data['module']['duplicates'] = all_duplicate_messages
        update_module_imports(proto_data['module'])
        # print_message(proto_data['module']['messages'])
        f.content = template_yang.render(module=proto_data['module'])

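# Map a parsed field to its yang type; message, enum and group types resolve
# to the last component of their proto type name, everything else maps
# through YANG_TYPE_MAP.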
def get_yang_type(field):
    type = field['type']
    if type in YANG_TYPE_MAP.keys():
        _type, _ = YANG_TYPE_MAP[type]
        if _type in ['enumeration', 'message', 'group']:
            return field['type_name'].split('.')[-1]
        else:
            return _type
    else:
        return type

def is_enumeration(type):
    if type in YANG_TYPE_MAP.keys():
        _type, _ = YANG_TYPE_MAP[type]
        return _type in ['enumeration']
    return False

def is_base_type(type):
    # check numeric value of the type first
    if type in YANG_TYPE_MAP.keys():
        _type, _ = YANG_TYPE_MAP[type]
        return _type not in ['message', 'group']
    else:
        # proto name of the type
        result = [_format for (_, _format) in YANG_TYPE_MAP.values() if
                  _format == type and _format not in ['message', 'group']]
        return len(result) > 0

def remove_unsupported_characters(text):
    unsupported_characters = ["{", "}", "[", "]", "\"", "\\", "*", "/"]
    return ''.join([i if i not in unsupported_characters else ' ' for i in
                    text])

def remove_first_character_if_match(str, char):
    if str.startswith(char):
        return str[1:]
    return str

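# Maps each protobuf field type code to a (yang type, proto type name) pair.
# The first element is the yang type emitted by the template; the second is
# the proto-level type name used by is_base_type() when it is given a name
# instead of a numeric type code.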
YANG_TYPE_MAP = {
    FieldDescriptor.TYPE_BOOL: ('boolean', 'boolean'),
    FieldDescriptor.TYPE_BYTES: ('binary', 'byte'),
    FieldDescriptor.TYPE_DOUBLE: ('decimal64', 'double'),
    FieldDescriptor.TYPE_ENUM: ('enumeration', 'enum'),
    FieldDescriptor.TYPE_FIXED32: ('int32', 'int64'),
    FieldDescriptor.TYPE_FIXED64: ('int64', 'uint64'),
    FieldDescriptor.TYPE_FLOAT: ('decimal64', 'float'),
    FieldDescriptor.TYPE_INT32: ('int32', 'int32'),
    FieldDescriptor.TYPE_INT64: ('int64', 'int64'),
    FieldDescriptor.TYPE_SFIXED32: ('int32', 'int32'),
    FieldDescriptor.TYPE_SFIXED64: ('int64', 'int64'),
    FieldDescriptor.TYPE_STRING: ('string', 'string'),
    FieldDescriptor.TYPE_SINT32: ('int32', 'int32'),
    FieldDescriptor.TYPE_SINT64: ('int64', 'int64'),
    FieldDescriptor.TYPE_UINT32: ('uint32', 'int64'),
    FieldDescriptor.TYPE_UINT64: ('uint64', 'uint64'),
    FieldDescriptor.TYPE_MESSAGE: ('message', 'message'),
    FieldDescriptor.TYPE_GROUP: ('group', 'group')
}

if __name__ == '__main__':
    # Read request message from stdin
    data = sys.stdin.read()

    # Parse request
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(data)

    # Create response
    response = plugin.CodeGeneratorResponse()

    # Generate code
    generate_code(request, response)

    # Serialise response message
    output = response.SerializeToString()

    # Write to stdout
    sys.stdout.write(output)
    # print is_base_type(9)