move the library to ONF
Change-Id: I383437e2934ce04cc1a7dc332134f7308991776f
diff --git a/grpc_robot/tools/__init__.py b/grpc_robot/tools/__init__.py
new file mode 100644
index 0000000..b1ed822
--- /dev/null
+++ b/grpc_robot/tools/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2020-present Open Networking Foundation
+# Original copyright 2020-present ADTRAN, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/grpc_robot/tools/dmi_tools.py b/grpc_robot/tools/dmi_tools.py
new file mode 100644
index 0000000..5bdb168
--- /dev/null
+++ b/grpc_robot/tools/dmi_tools.py
@@ -0,0 +1,85 @@
+# Copyright 2020-present Open Networking Foundation
+# Original copyright 2020-present ADTRAN, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from grpc_robot.grpc_robot import _package_version_get
+
+from dmi import hw_metrics_mgmt_service_pb2, hw_events_mgmt_service_pb2
+from ..tools.protobuf_to_dict import protobuf_to_dict
+
+
+class DmiTools(object):
+ """
+ Tools for the device-management-interface, e.g decoding / conversions.
+ """
+
+ try:
+ ROBOT_LIBRARY_VERSION = _package_version_get('grpc_robot')
+ except NameError:
+ ROBOT_LIBRARY_VERSION = 'unknown'
+
+ @staticmethod
+ def hw_metrics_mgmt_decode_metric(bytestring, return_enum_integer='false', return_defaults='false', human_readable_timestamps='true'):
+ """
+ Converts bytes to a Metric as defined in _message Metric_ from hw_metrics_mgmt_service.proto
+
+ *Parameters*:
+ - bytestring: <bytes>; Byte string, e.g. as it comes from Kafka messages.
+ - return_enum_integer: <string> or <bool>; Whether or not to return the enum values as integer values rather than their labels. Default: _false_.
+ - return_defaults: <string> or <bool>; Whether or not to return the default values. Default: _false_.
+ - human_readable_timestamps: <string> or <bool>; Whether or not to convert the timestamps to human-readable format. Default: _true_.
+
+ *Return*: A dictionary with same structure as the _metric_ key from the return dictionary of keyword _Hw Metrics Mgmt Service Get Metric_.
+
+ *Example*:
+ | Import Library | grpc_robot.DmiTools | WITH NAME | dmi_tools |
+ | ${kafka_records} | kafka.Records Get |
+ | FOR | ${kafka_record} | IN | @{kafka_records} |
+ | | ${metric} | dmi_tools.Hw Metrics Mgmt Decode Metric | ${kafka_record}[message] |
+ | | Log | ${metric} |
+ | END |
+ """
+ return_enum_integer = str(return_enum_integer).lower() == 'true'
+ metric = hw_metrics_mgmt_service_pb2.Metric.FromString(bytestring)
+ return protobuf_to_dict(metric,
+ use_enum_labels=not return_enum_integer,
+ including_default_value_fields=str(return_defaults).lower() == 'true',
+ human_readable_timestamps=str(human_readable_timestamps).lower() == 'true')
+
+ @staticmethod
+ def hw_events_mgmt_decode_event(bytestring, return_enum_integer='false', return_defaults='false', human_readable_timestamps='true'):
+ """
+ Converts bytes to a Event as defined in _message Event_ from hw_events_mgmt_service.proto
+
+ *Parameters*:
+ - bytestring: <bytes>; Byte string, e.g. as it comes from Kafka messages.
+ - return_enum_integer: <string> or <bool>; Whether or not to return the enum values as integer values rather than their labels. Default: _false_.
+ - return_defaults: <string> or <bool>; Whether or not to return the default values. Default: _false_.
+ - human_readable_timestamps: <string> or <bool>; Whether or not to convert the timestamps to human-readable format. Default: _true_.
+
+ *Return*: A dictionary with same structure as the _event_ key from the return dictionary of keyword _Hw Event Mgmt Service List Events_.
+
+ *Example*:
+ | Import Library | grpc_robot.DmiTools | WITH NAME | dmi_tools |
+ | ${kafka_records} | kafka.Records Get |
+ | FOR | ${kafka_record} | IN | @{kafka_records} |
+ | | ${event} | dmi_tools.Hw Events Mgmt Decode Event | ${kafka_record}[message] |
+ | | Log | ${event} |
+ | END |
+ """
+ return_enum_integer = str(return_enum_integer).lower() == 'true'
+ event = hw_events_mgmt_service_pb2.Event.FromString(bytestring)
+ return protobuf_to_dict(event,
+ use_enum_labels=not return_enum_integer,
+ including_default_value_fields=str(return_defaults).lower() == 'true',
+ human_readable_timestamps=str(human_readable_timestamps).lower() == 'true')
diff --git a/grpc_robot/tools/protobuf_parse.py b/grpc_robot/tools/protobuf_parse.py
new file mode 100644
index 0000000..f11e1a3
--- /dev/null
+++ b/grpc_robot/tools/protobuf_parse.py
@@ -0,0 +1,457 @@
+# Copyright 2020-present Open Networking Foundation
+# Original copyright 2020-present ADTRAN, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# -*- coding: utf-8 -*-
+
+# Parser for protocol buffer .proto files
+import enum as stdlib_enum
+from string import ascii_letters, digits, hexdigits, octdigits
+
+import attr
+
+from parsy import char_from, from_enum, generate, regex, seq, string
+
+# This file follows the spec at
+# https://developers.google.com/protocol-buffers/docs/reference/proto3-spec
+# very closely.
+
+# However, because we are parsing into useful objects, we do transformations
+# along the way e.g. turning into integers, strings etc. and custom objects.
+# Some of the lowest level items have been implemented using 'regex' and converting
+# the descriptions to regular expressions. Higher level constructs have been
+# implemented using other parsy primitives and combinators.
+
+# Notes:
+
+# 1. Whitespace is very badly defined in the 'spec', so we guess what is meant.
+# 2. The spec doesn't allow for comments, and neither does this parser.
+# Other places mention that C++ style comments are allowed. To support that,
+# this parser would need to be changed into split lexing/parsing stages
+# (otherwise you hit issues with comments start markers within string literals).
+# 3. Other notes inline.
+
+
+# Our utilities
+optional_string = lambda s: string(s).times(0, 1).concat()
+convert_decimal = int
+convert_octal = lambda s: int(s, 8)
+convert_hex = lambda s: int(s, 16)
+exclude_none = lambda l: [i for i in l if i is not None]
+
+
+def lexeme(p):
+ """
+ From a parser (or string), make a parser that consumes
+ whitespace on either side.
+ """
+ if isinstance(p, str):
+ p = string(p)
+ return regex(r'\s*') >> p << regex(r'\s*')
+
+
+def is_present(p):
+ """
+ Given a parser or string, make a parser that returns
+ True if the parser matches, False otherwise
+ """
+ return lexeme(p).optional().map(lambda v: False if v is None else True)
+
+
+# Our data structures
+@attr.s
+class Import:
+ identifier = attr.ib()
+ option = attr.ib()
+
+
+@attr.s
+class Package:
+    identifier = attr.ib()
+
+
+@attr.s
+class Option:
+ name = attr.ib()
+ value = attr.ib()
+
+
+@attr.s
+class Field:
+ repeated = attr.ib()
+ type = attr.ib()
+ name = attr.ib()
+ number = attr.ib()
+ options = attr.ib()
+
+
+@attr.s
+class OneOfField:
+ type = attr.ib()
+ name = attr.ib()
+ number = attr.ib()
+ options = attr.ib()
+
+
+@attr.s
+class OneOf:
+ name = attr.ib()
+ fields = attr.ib()
+
+
+@attr.s
+class Map:
+ key_type = attr.ib()
+ type = attr.ib()
+ name = attr.ib()
+ number = attr.ib()
+ options = attr.ib()
+
+
+@attr.s
+class Reserved:
+ items = attr.ib()
+
+
+@attr.s
+class Range:
+ from_ = attr.ib()
+ to = attr.ib()
+
+
+@attr.s
+class EnumField:
+ name = attr.ib()
+ value = attr.ib()
+ options = attr.ib()
+
+
+@attr.s
+class Enum:
+ name = attr.ib()
+ body = attr.ib()
+
+
+@attr.s
+class Message:
+ name = attr.ib()
+ body = attr.ib()
+
+
+@attr.s
+class Service:
+ name = attr.ib()
+ body = attr.ib()
+
+
+@attr.s
+class Rpc:
+ name = attr.ib()
+ request_stream = attr.ib()
+ request_message_type = attr.ib()
+ response_stream = attr.ib()
+ response_message_type = attr.ib()
+ options = attr.ib()
+
+
+@attr.s
+class Proto:
+ syntax = attr.ib()
+ statements = attr.ib()
+
+
+# Enums:
+class ImportOption(stdlib_enum.Enum):
+ WEAK = "weak"
+ PUBLIC = "public"
+
+
+class Type(stdlib_enum.Enum):
+ DOUBLE = "double"
+ FLOAT = "float"
+ INT32 = "int32"
+ INT64 = "int64"
+ UINT32 = "uint32"
+ UINT64 = "uint64"
+ SINT32 = "sint32"
+ SINT64 = "sint64"
+ FIXED32 = "fixed32"
+ FIXED64 = "fixed64"
+ SFIXED32 = "sfixed32"
+ SFIXED64 = "sfixed64"
+ BOOL = "bool"
+ STRING = "string"
+ BYTES = "bytes"
+
+
+class KeyType(stdlib_enum.Enum):
+ INT32 = "int32"
+ INT64 = "int64"
+ UINT32 = "uint32"
+ UINT64 = "uint64"
+ SINT32 = "sint32"
+ SINT64 = "sint64"
+ FIXED32 = "fixed32"
+ FIXED64 = "fixed64"
+ SFIXED32 = "sfixed32"
+ SFIXED64 = "sfixed64"
+ BOOL = "bool"
+ STRING = "string"
+
+
+# Some extra constants to avoid typing
+SEMI, EQ, LPAREN, RPAREN, LBRACE, RBRACE, LBRAC, RBRAC = [lexeme(c) for c in ";=(){}[]"]
+
+
+# -- Beginning of following spec --
+# Letters and digits
+letter = char_from(ascii_letters)
+decimalDigit = char_from(digits)
+octalDigit = char_from(octdigits)
+hexDigit = char_from(hexdigits)
+
+# Identifiers
+
+# Compared to spec, we add some '_' prefixed items which are not wrapped in `lexeme`,
+# on the assumption that spaces in the middle of identifiers are not accepted.
+_ident = (letter + (letter | decimalDigit | string("_")).many().concat()).desc('ident')
+ident = lexeme(_ident)
+fullIdent = lexeme(ident + (string(".") + ident).many().concat()).desc('fullIdent')
+_messageName = _ident
+messageName = lexeme(ident).desc('messageName')
+_enumName = ident
+enumName = lexeme(_enumName).desc('enumName')
+fieldName = ident.desc('fieldName')
+oneofName = ident.desc('oneofName')
+mapName = ident.desc('mapName')
+serviceName = ident.desc('serviceName')
+rpcName = ident.desc('rpcName')
+messageType = optional_string(".") + (_ident + string(".")).many().concat() + _messageName
+enumType = optional_string(".") + (_ident + string(".")).many().concat() + _enumName
+
+# Integer literals
+decimalLit = regex("[1-9][0-9]*").desc('decimalLit').map(convert_decimal)
+octalLit = regex("0[0-7]*").desc('octalLit').map(convert_octal)
+hexLit = regex("0[xX][0-9a-fA-F]+").desc('hexLit').map(convert_hex)
+intLit = decimalLit | hexLit | octalLit
+
+
+# Floating-point literals
+decimals = r'[0-9]+'
+exponent = r'[e|E][+|-]?' + decimals
+floatLit = regex(r'({decimals}\.({decimals})?({exponent})?)|{decimals}{exponent}|\.{decimals}({exponent})?'
+ .format(decimals=decimals, exponent=exponent)).desc('floatLit').map(float)
+
+
+# Boolean
+boolLit = (string("true").result(True) | string("false").result(False)).desc('boolLit')
+
+
+# String literals
+hexEscape = regex(r"\\[xX]") >> regex("[0-9a-fA-F]{2}").map(convert_hex).map(chr)
+octEscape = regex(r"\\") >> regex('[0-7]{2}').map(convert_octal).map(chr)
+charEscape = regex(r"\\") >> (
+ string("a").result("\a")
+ | string("b").result("\b")
+ | string("f").result("\f")
+ | string("n").result("\n")
+ | string("r").result("\r")
+ | string("t").result("\t")
+ | string("v").result("\v")
+ | string("\\").result("\\")
+ | string("'").result("'")
+ | string('"').result('"')
+)
+escapes = hexEscape | octEscape | charEscape
+# Correction to spec regarding " and ' inside quoted strings
+strLit = (string("'") >> (escapes | regex(r"[^\0\n\'\\]")).many().concat() << string("'")
+ | string('"') >> (escapes | regex(r"[^\0\n\"\\]")).many().concat() << string('"')).desc('strLit')
+quote = string("'") | string('"')
+
+# EmptyStatement
+emptyStatement = string(";").result(None)
+
+# Signed numbers:
+# (Extra compared to spec, to cope with need to produce signed numeric values)
+signedNumberChange = lambda s, num: num * (-1 if s == "-" else +1)
+sign = regex("[-+]?")
+signedIntLit = seq(sign, intLit).combine(signedNumberChange)
+signedFloatLit = seq(sign, floatLit).combine(signedNumberChange)
+
+
+# Constant
+# put fullIdent at end to disambiguate from boolLit
+constant = signedIntLit | signedFloatLit | strLit | boolLit | fullIdent
+
+# Syntax
+syntax = lexeme("syntax") >> EQ >> quote >> string("proto3") << quote + SEMI
+
+# Import Statement
+import_option = from_enum(ImportOption)
+
+import_ = seq(lexeme("import") >> import_option.optional().tag('option'),
+ lexeme(strLit).tag('identifier') << SEMI).combine_dict(Import)
+
+# Package
+package = seq(lexeme("package") >> fullIdent << SEMI).map(Package)
+
+# Option
+optionName = (ident | (LPAREN >> fullIdent << RPAREN)) + (string(".") + ident).many().concat()
+option = seq(lexeme("option") >> optionName.tag('name'),
+ EQ >> constant.tag('value') << SEMI,
+ ).combine_dict(Option)
+
+# Normal field
+type_ = lexeme(from_enum(Type) | messageType | enumType)
+fieldNumber = lexeme(intLit)
+
+fieldOption = seq(optionName.tag('name'),
+ EQ >> constant.tag('value')).combine_dict(Option)
+fieldOptions = fieldOption.sep_by(lexeme(","), min=1)
+fieldOptionList = (lexeme("[") >> fieldOptions << lexeme("]")).optional().map(
+ lambda o: [] if o is None else o)
+
+field = seq(is_present("repeated").tag('repeated'),
+ type_.tag('type'),
+ fieldName.tag('name') << EQ,
+ fieldNumber.tag('number'),
+ fieldOptionList.tag('options') << SEMI,
+ ).combine_dict(Field)
+
+# Oneof and oneof field
+oneofField = seq(type_.tag('type'),
+ fieldName.tag('name') << EQ,
+ fieldNumber.tag('number'),
+ fieldOptionList.tag('options') << SEMI,
+ ).combine_dict(OneOfField)
+oneof = seq(lexeme("oneof") >> oneofName.tag('name'),
+ LBRACE
+ >> (oneofField | emptyStatement).many().map(exclude_none).tag('fields')
+ << RBRACE
+ ).combine_dict(OneOf)
+
+# Map field
+keyType = lexeme(from_enum(KeyType))
+mapField = seq(lexeme("map") >> lexeme("<") >> keyType.tag('key_type'),
+ lexeme(",") >> type_.tag('type'),
+ lexeme(">") >> mapName.tag('name'),
+ EQ >> fieldNumber.tag('number'),
+ fieldOptionList.tag('options') << SEMI
+ ).combine_dict(Map)
+
+# Reserved
+range_ = seq(lexeme(intLit).tag('from_'),
+ (lexeme("to") >> (intLit | lexeme("max"))).optional().tag('to')
+ ).combine_dict(Range)
+ranges = range_.sep_by(lexeme(","), min=1)
+# The spec for 'reserved' indicates 'fieldName' here, which is never a quoted string.
+# But the example has a quoted string. We have changed it to 'strLit'
+fieldNames = strLit.sep_by(lexeme(","), min=1)
+reserved = seq(lexeme("reserved") >> (ranges | fieldNames) << SEMI
+ ).combine(Reserved)
+
+# Enum definition
+enumValueOption = seq(optionName.tag('name') << EQ,
+ constant.tag('value')
+ ).combine_dict(Option)
+enumField = seq(ident.tag('name') << EQ,
+ lexeme(intLit).tag('value'),
+ (lexeme("[") >> enumValueOption.sep_by(lexeme(","), min=1) << lexeme("]")).optional()
+ .map(lambda o: [] if o is None else o).tag('options')
+ << SEMI
+ ).combine_dict(EnumField)
+enumBody = (LBRACE
+ >> (option | enumField | emptyStatement).many().map(exclude_none)
+ << RBRACE)
+enum = seq(lexeme("enum") >> enumName.tag('name'),
+ enumBody.tag('body')
+ ).combine_dict(Enum)
+
+
+# Message definition
+@generate
+def message():
+ yield lexeme("message")
+ name = yield messageName
+ body = yield messageBody
+ return Message(name=name, body=body)
+
+
+messageBody = (LBRACE
+ >> (field | enum | message | option | oneof | mapField
+ | reserved | emptyStatement).many()
+ << RBRACE)
+
+
+# Service definition
+rpc = seq(lexeme("rpc") >> rpcName.tag('name'),
+ LPAREN
+ >> (is_present("stream").tag("request_stream")),
+ messageType.tag("request_message_type") << RPAREN,
+ lexeme("returns") >> LPAREN
+ >> (is_present("stream").tag("response_stream")),
+ messageType.tag("response_message_type")
+ << RPAREN,
+ ((LBRACE
+ >> (option | emptyStatement).many()
+ << RBRACE)
+ | SEMI.result([])
+ ).optional().map(exclude_none).tag('options')
+ ).combine_dict(Rpc)
+
+service = seq(lexeme("service") >> serviceName.tag('name'),
+ LBRACE
+ >> (option | rpc | emptyStatement).many().map(exclude_none).tag('body')
+ << RBRACE
+ ).combine_dict(Service)
+
+
+# Proto file
+topLevelDef = message | enum | service
+proto = seq(syntax.tag('syntax'),
+ (import_ | package | option | topLevelDef | emptyStatement
+ ).many().map(exclude_none).tag('statements')
+ ).combine_dict(Proto)
+
+
+EXAMPLE = """syntax = "proto3";
+import public "other.proto";
+option java_package = "com.example.foo";
+option java_package = "com.example.foo";
+package dmi;
+
+enum EnumAllowingAlias {
+ option allow_alias = true;
+ UNKNOWN = 0;
+ STARTED = 1;
+ RUNNING = 2 [(custom_option) = "hello world"];
+}
+message outer {
+ option (my_option).a = true;
+ message inner {
+ int64 ival = 1;
+ }
+ repeated inner inner_message = 2;
+ EnumAllowingAlias enum_field =3;
+ map<int32, string> my_map = 4;
+ oneof operation {
+ MetricsConfig changes = 2;
+ bool reset_to_default = 3;
+ }
+}
+"""
+# Smoke test - should find 4 top level statements in the example:
+# assert len(proto.parse(EXAMPLE).statements) == 4
+# print(proto.parse(EXAMPLE).statements)
+# for st in proto.parse(EXAMPLE).statements:
+# print(type(st))
diff --git a/grpc_robot/tools/protobuf_to_dict.py b/grpc_robot/tools/protobuf_to_dict.py
new file mode 100644
index 0000000..19ebf1a
--- /dev/null
+++ b/grpc_robot/tools/protobuf_to_dict.py
@@ -0,0 +1,282 @@
+# Copyright 2020-present Open Networking Foundation
+# Original copyright 2020-present ADTRAN, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This is free and unencumbered software released into the public domain
+# by its author, Ben Hodgson <ben@benhodgson.com>.
+#
+# Anyone is free to copy, modify, publish, use, compile, sell, or
+# distribute this software, either in source code form or as a compiled
+# binary, for any purpose, commercial or non-commercial, and by any
+# means.
+#
+# In jurisdictions that recognise copyright laws, the author or authors
+# of this software dedicate any and all copyright interest in the
+# software to the public domain. We make this dedication for the benefit
+# of the public at large and to the detriment of our heirs and
+# successors. We intend this dedication to be an overt act of
+# relinquishment in perpetuity of all present and future rights to this
+# software under copyright law.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# For more information, please refer to <http://unlicense.org/>
+
+
+# -*- coding:utf-8 -*-
+
+# copied from https://github.com/kaporzhu/protobuf-to-dict
+# all credits to this script go to Kapor Zhu (kapor.zhu@gmail.com)
+#
+# Comments:
+# - need a fix for bug: "Use enum_label when setting the default value if use_enum_labels is true" (line 95)
+# - try to convert timestamps to a human readable format
+
+import base64
+
+import six
+from datetime import datetime
+
+from google.protobuf.message import Message
+from google.protobuf.descriptor import FieldDescriptor
+
+
+__all__ = ["protobuf_to_dict", "TYPE_CALLABLE_MAP", "dict_to_protobuf",
+ "REVERSE_TYPE_CALLABLE_MAP"]
+
+
+EXTENSION_CONTAINER = '___X'
+
+
+TYPE_CALLABLE_MAP = {
+ FieldDescriptor.TYPE_DOUBLE: float,
+ FieldDescriptor.TYPE_FLOAT: float,
+ FieldDescriptor.TYPE_INT32: int,
+ FieldDescriptor.TYPE_INT64: int if six.PY3 else six.integer_types[1],
+ FieldDescriptor.TYPE_UINT32: int,
+ FieldDescriptor.TYPE_UINT64: int if six.PY3 else six.integer_types[1],
+ FieldDescriptor.TYPE_SINT32: int,
+ FieldDescriptor.TYPE_SINT64: int if six.PY3 else six.integer_types[1],
+ FieldDescriptor.TYPE_FIXED32: int,
+ FieldDescriptor.TYPE_FIXED64: int if six.PY3 else six.integer_types[1],
+ FieldDescriptor.TYPE_SFIXED32: int,
+ FieldDescriptor.TYPE_SFIXED64: int if six.PY3 else six.integer_types[1],
+ FieldDescriptor.TYPE_BOOL: bool,
+ FieldDescriptor.TYPE_STRING: six.text_type,
+ FieldDescriptor.TYPE_BYTES: six.binary_type,
+ FieldDescriptor.TYPE_ENUM: int,
+}
+
+
+def repeated(type_callable):
+ return lambda value_list: [type_callable(value) for value in value_list]
+
+
+def enum_label_name(field, value):
+ return field.enum_type.values_by_number[int(value)].name
+
+
+def _is_map_entry(field):
+ return (field.type == FieldDescriptor.TYPE_MESSAGE and
+ field.message_type.has_options and
+ field.message_type.GetOptions().map_entry)
+
+
+def protobuf_to_dict(pb, type_callable_map=TYPE_CALLABLE_MAP,
+ use_enum_labels=False,
+ including_default_value_fields=False,
+ human_readable_timestamps=False):
+ result_dict = {}
+ extensions = {}
+ for field, value in pb.ListFields():
+ if field.message_type and field.message_type.has_options and field.message_type.GetOptions().map_entry:
+ result_dict[field.name] = dict()
+ value_field = field.message_type.fields_by_name['value']
+ type_callable = _get_field_value_adaptor(
+ pb, value_field, type_callable_map,
+ use_enum_labels, including_default_value_fields)
+ for k, v in value.items():
+ result_dict[field.name][k] = type_callable(v)
+ continue
+ type_callable = _get_field_value_adaptor(pb, field, type_callable_map,
+ use_enum_labels,
+ including_default_value_fields,
+ human_readable_timestamps)
+ if field.label == FieldDescriptor.LABEL_REPEATED:
+ type_callable = repeated(type_callable)
+
+ if field.is_extension:
+ extensions[str(field.number)] = type_callable(value)
+ continue
+
+ if field.full_name in ['google.protobuf.Timestamp.seconds'] and human_readable_timestamps:
+ result_dict[field.name] = datetime.fromtimestamp(type_callable(value)).strftime('%Y-%m-%d %H:%M:%S.%f')
+ else:
+ result_dict[field.name] = type_callable(value)
+
+ # Serialize default value if including_default_value_fields is True.
+ if including_default_value_fields:
+ for field in pb.DESCRIPTOR.fields:
+ # Singular message fields and oneof fields will not be affected.
+ if ((
+ field.label != FieldDescriptor.LABEL_REPEATED and
+ field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE) or
+ field.containing_oneof):
+ continue
+ if field.name in result_dict:
+ # Skip the field which has been serailized already.
+ continue
+ if _is_map_entry(field):
+ result_dict[field.name] = {}
+ else:
+ if use_enum_labels and field.type == FieldDescriptor.TYPE_ENUM:
+ result_dict[field.name] = enum_label_name(field, field.default_value)
+ else:
+ result_dict[field.name] = field.default_value
+
+ if extensions:
+ result_dict[EXTENSION_CONTAINER] = extensions
+ return result_dict
+
+
+def _get_field_value_adaptor(pb, field, type_callable_map=TYPE_CALLABLE_MAP,
+ use_enum_labels=False,
+ including_default_value_fields=False,
+ human_readable_timestamps=False):
+ if field.type == FieldDescriptor.TYPE_MESSAGE:
+ # recursively encode protobuf sub-message
+ return lambda pb: protobuf_to_dict(
+ pb, type_callable_map=type_callable_map,
+ use_enum_labels=use_enum_labels,
+ including_default_value_fields=including_default_value_fields,
+ human_readable_timestamps=human_readable_timestamps
+ )
+
+ if use_enum_labels and field.type == FieldDescriptor.TYPE_ENUM:
+ return lambda value: enum_label_name(field, value)
+
+ if field.type in type_callable_map:
+ return type_callable_map[field.type]
+
+ raise TypeError("Field %s.%s has unrecognised type id %d" % (
+ pb.__class__.__name__, field.name, field.type))
+
+
+REVERSE_TYPE_CALLABLE_MAP = {
+}
+
+
+def dict_to_protobuf(pb_klass_or_instance, values, type_callable_map=REVERSE_TYPE_CALLABLE_MAP, strict=True, ignore_none=False):
+ """Populates a protobuf model from a dictionary.
+
+ :param pb_klass_or_instance: a protobuf message class, or an protobuf instance
+ :type pb_klass_or_instance: a type or instance of a subclass of google.protobuf.message.Message
+ :param dict values: a dictionary of values. Repeated and nested values are
+ fully supported.
+ :param dict type_callable_map: a mapping of protobuf types to callables for setting
+ values on the target instance.
+ :param bool strict: complain if keys in the map are not fields on the message.
+ :param bool strict: ignore None-values of fields, treat them as empty field
+ """
+ if isinstance(pb_klass_or_instance, Message):
+ instance = pb_klass_or_instance
+ else:
+ instance = pb_klass_or_instance()
+ return _dict_to_protobuf(instance, values, type_callable_map, strict, ignore_none)
+
+
+def _get_field_mapping(pb, dict_value, strict):
+ field_mapping = []
+ for key, value in dict_value.items():
+ if key == EXTENSION_CONTAINER:
+ continue
+ if key not in pb.DESCRIPTOR.fields_by_name:
+ if strict:
+ raise KeyError("%s does not have a field called %s" % (pb, key))
+ continue
+ field_mapping.append((pb.DESCRIPTOR.fields_by_name[key], value, getattr(pb, key, None)))
+
+ for ext_num, ext_val in dict_value.get(EXTENSION_CONTAINER, {}).items():
+ try:
+ ext_num = int(ext_num)
+ except ValueError:
+ raise ValueError("Extension keys must be integers.")
+ if ext_num not in pb._extensions_by_number:
+ if strict:
+                raise KeyError("%s does not have an extension with number %s. Perhaps you forgot to import it?" % (pb, ext_num))
+ continue
+ ext_field = pb._extensions_by_number[ext_num]
+ pb_val = None
+ pb_val = pb.Extensions[ext_field]
+ field_mapping.append((ext_field, ext_val, pb_val))
+
+ return field_mapping
+
+
+def _dict_to_protobuf(pb, value, type_callable_map, strict, ignore_none):
+ fields = _get_field_mapping(pb, value, strict)
+
+ for field, input_value, pb_value in fields:
+ if ignore_none and input_value is None:
+ continue
+ if field.label == FieldDescriptor.LABEL_REPEATED:
+ if field.message_type and field.message_type.has_options and field.message_type.GetOptions().map_entry:
+ value_field = field.message_type.fields_by_name['value']
+ for key, value in input_value.items():
+ if value_field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
+ _dict_to_protobuf(getattr(pb, field.name)[key], value, type_callable_map, strict, ignore_none)
+ else:
+ getattr(pb, field.name)[key] = value
+ continue
+ for item in input_value:
+ if field.type == FieldDescriptor.TYPE_MESSAGE:
+ m = pb_value.add()
+ _dict_to_protobuf(m, item, type_callable_map, strict, ignore_none)
+ elif field.type == FieldDescriptor.TYPE_ENUM and isinstance(item, six.string_types):
+ pb_value.append(_string_to_enum(field, item))
+ else:
+ pb_value.append(item)
+ continue
+ if field.type == FieldDescriptor.TYPE_MESSAGE:
+ _dict_to_protobuf(pb_value, input_value, type_callable_map, strict, ignore_none)
+ continue
+
+ if field.type in type_callable_map:
+ input_value = type_callable_map[field.type](input_value)
+
+ if field.is_extension:
+ pb.Extensions[field] = input_value
+ continue
+
+ if field.type == FieldDescriptor.TYPE_ENUM and isinstance(input_value, six.string_types):
+ input_value = _string_to_enum(field, input_value)
+
+ setattr(pb, field.name, input_value)
+
+ return pb
+
+
+def _string_to_enum(field, input_value):
+ enum_dict = field.enum_type.values_by_name
+ try:
+ input_value = enum_dict[input_value].number
+ except KeyError:
+ raise KeyError("`%s` is not a valid value for field `%s`" % (input_value, field.name))
+ return input_value
diff --git a/grpc_robot/tools/protop.py b/grpc_robot/tools/protop.py
new file mode 100644
index 0000000..7e53a1d
--- /dev/null
+++ b/grpc_robot/tools/protop.py
@@ -0,0 +1,204 @@
+# Copyright 2020-present Open Networking Foundation
+# Original copyright 2020-present ADTRAN, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import re
+import glob
+import json
+import argparse
+
+try:
+ from . import protobuf_parse as parser
+except ImportError:
+ import protobuf_parse as parser
+
+__version__ = '1.0'
+
+USAGE = """ProtoBuf -- Parser of Protocol Buffer to create the input JSON file for the library
+
+Usage: grpc_robot.protop [options] target_version
+
+ProtoBuf parser can be used to parse ProtoBuf files (*.proto) into a json formatted input file
+for the grpc_robot library to be used for keyword documentation.
+
+"""
+
+EPILOG = """
+Example
+=======
+# Executing `grpc_robot.protop` module using Python.
+$ grpc_robot.protop -i /home/user/Workspace/grpc/proto/dmi 0.9.1
+"""
+
+
+class ProtoBufParser(object):
+
+ def __init__(self, target, target_version, input_dir, output_dir=None):
+
+ super().__init__()
+
+ self.target = target
+ self.target_version = target_version.replace('.', '_')
+ self.input_dir = input_dir
+ self.output_dir = output_dir
+
+ @staticmethod
+ def read_enum(enum, protobuf_dict, module):
+ enum_dict = {'name': enum.name, 'type': 'enum', 'module': module, 'values': {ef.value: ef.name for ef in enum.body}}
+ protobuf_dict['data_types'].append(enum_dict)
+
+ def read_message(self, message, protobuf_dict, module):
+ message_dict = {'name': message.name, 'type': 'message', 'module': module, 'fields': []}
+
+ for f in message.body:
+
+ if f is None:
+ continue
+
+ if isinstance(f, parser.Enum):
+ self.read_enum(f, protobuf_dict, module)
+ continue
+
+ elif isinstance(f, parser.Message):
+ self.read_message(f, protobuf_dict, module)
+ continue
+
+ field_dict = {'name': f.name, 'is_choice': isinstance(f, parser.OneOf)}
+
+ if isinstance(f, parser.Field):
+ field_dict['repeated'] = f.repeated
+
+ try:
+ field_dict['type'] = f.type._value_
+ field_dict['lookup'] = False
+ except AttributeError:
+ field_dict['type'] = f.type
+ field_dict['lookup'] = True
+
+ elif isinstance(f, parser.OneOf):
+ field_dict['cases'] = []
+ for c in f.fields:
+ case_dict = {'name': c.name}
+ try:
+ case_dict['type'] = c.type._value_
+ case_dict['lookup'] = False
+ except AttributeError:
+ case_dict['type'] = c.type
+ case_dict['lookup'] = True
+ field_dict['cases'].append(case_dict)
+
+ message_dict['fields'].append(field_dict)
+
+ protobuf_dict['data_types'].append(message_dict)
+
+ def parse_files(self):
+
+ protobuf_dict = {
+ 'modules': [],
+ 'data_types': [],
+ 'services': []
+ }
+
+ for file_name in glob.glob(os.path.join(self.input_dir, '*.proto')):
+ print(file_name)
+
+ module = os.path.splitext(os.path.basename(file_name))[0]
+ module_dict = {'name': module, 'imports': []}
+
+ # the protobuf parser can not handle comments "// ...", so remove them first from the file
+ file_content = re.sub(r'\/\/.*', '', open(file_name).read())
+ parsed = parser.proto.parse(file_content)
+
+ # print(parsed.statements)
+
+ for p in parsed.statements:
+ # print(p)
+
+ if isinstance(p, parser.Import):
+ module_dict['imports'].append(os.path.splitext(os.path.basename(p.identifier))[0])
+
+ elif isinstance(p, parser.Enum):
+ self.read_enum(p, protobuf_dict, module)
+
+ elif isinstance(p, parser.Message):
+ self.read_message(p, protobuf_dict, module)
+
+ elif isinstance(p, parser.Service):
+ service_dict = {'name': p.name, 'module': module, 'rpcs': []}
+
+ for field in p.body:
+
+ if isinstance(field, parser.Enum):
+ self.read_enum(field, protobuf_dict, module)
+
+ elif isinstance(field, parser.Message):
+ self.read_message(field, protobuf_dict, module)
+
+ elif isinstance(field, parser.Rpc):
+ rpc_dict = {'name': field.name, 'request': {}, 'response': {}}
+
+ for attr in ['request', 'response']:
+ try:
+ rpc_dict[attr]['is_stream'] = field.__getattribute__('%s_stream' % attr)
+
+ try:
+ rpc_dict[attr]['type'] = field.__getattribute__('%s_message_type' % attr)._value_
+ rpc_dict[attr]['lookup'] = False
+ except AttributeError:
+ rpc_dict[attr]['type'] = field.__getattribute__('%s_message_type' % attr)
+ rpc_dict[attr]['lookup'] = not rpc_dict[attr]['type'].lower().startswith('google.protobuf.')
+
+ except AttributeError:
+ rpc_dict[attr] = None
+
+ service_dict['rpcs'].append(rpc_dict)
+
+ protobuf_dict['services'].append(service_dict)
+
+ protobuf_dict['modules'].append(module_dict)
+
+ if self.output_dir is not None:
+ json_file_name = os.path.join(self.output_dir, self.target, '%s_%s' % (self.target, self.target_version), '%s.json' % self.target)
+ json.dump(protobuf_dict, open(json_file_name, 'w'))
+
+ return protobuf_dict
+
+
+base_dir = os.path.dirname(os.path.realpath(__file__))
+output_base_dir = os.path.join(os.path.split(base_dir)[:-1][0], 'services')
+
+
+def main():
+ # create commandline parser
+ arg_parse = argparse.ArgumentParser(description=USAGE, epilog=EPILOG, formatter_class=argparse.RawTextHelpFormatter)
+
+ # add parser options
+ arg_parse.add_argument('target', choices=['dmi', 'voltha'],
+ help="Target type of which the ProtocolBuffer files shall be converted to the JSON file.")
+ arg_parse.add_argument('target_version', help="Version number of the ProtocolBuffer files.")
+
+ arg_parse.add_argument('-i', '--inputdir', default=os.getcwd(), help="Path to the location of the ProtocolBuffer files.")
+ arg_parse.add_argument('-o', '--outputdir', default=os.getcwd(), help="Path to the location JSON file to be stored.")
+
+ arg_parse.add_argument('-v', '--version', action='version', version=__version__)
+ arg_parse.set_defaults(feature=False)
+
+ # parse commandline
+ args = arg_parse.parse_args()
+
+ ProtoBufParser(args.target, args.target_version, args.inputdir or os.getcwd(), args.outputdir or output_base_dir).parse_files()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/grpc_robot/tools/robot_tools.py b/grpc_robot/tools/robot_tools.py
new file mode 100644
index 0000000..f5b7b0c
--- /dev/null
+++ b/grpc_robot/tools/robot_tools.py
@@ -0,0 +1,116 @@
+# Copyright 2020-present Open Networking Foundation
+# Original copyright 2020-present ADTRAN, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from grpc_robot.grpc_robot import _package_version_get
+
+
+class Collections(object):
+ """
+ Tools for collections (list, dict) related functionality.
+ """
+
+ try:
+ ROBOT_LIBRARY_VERSION = _package_version_get('grpc_robot')
+ except NameError:
+ ROBOT_LIBRARY_VERSION = 'unknown'
+
+ @staticmethod
+ def dict_get_key_by_value(input_dict, search_value):
+ """
+ Gets the first key from _input_dict_ which has the value of _search_value_.
+
+ If _search_value_ is not found in _input_dict_, an empty string is returned.
+
+ *Parameters*:
+ - _input_dict_: <dictionary> to be browsed.
+ - _search_value_: <string>, value to be searched for.
+
+ *Return*: key of dictionary if search value is in input_dict else empty string
+ """
+ return_key = ''
+ for key, val in input_dict.items():
+ if val == search_value:
+ return_key = key
+ break
+
+ return return_key
+
+ @staticmethod
+ def dict_get_value(values_dict, key, strict=False):
+ """
+ Returns the value for given _key_ in _values_dict_.
+
+ If _strict_ is set to False (default) it will return given _key_ if its is not in the dictionary.
+ If set to True, an AssertionError is raised.
+
+ *Parameters*:
+ - _key_: <string>, key to be searched in dictionary.
+ - _values_dict_: <dictionary> in which the key is searched.
+ - _strict_: Optional: <boolean> switch to indicate if an exception shall be raised if key is not in values_dict.
+ Default: False
+
+ *Return*:
+ - if key is in values_dict: Value from _values_dict_ for _key_.
+ - else: _key_.
+ - raises AssertionError in case _key_ is not in _values_dict_ and _strict_ is True.
+ """
+ try:
+ return_value = values_dict[key]
+ except KeyError:
+ if strict:
+ raise AssertionError('Error: Value not found for key: %s' % key)
+ else:
+ return_value = key
+
+ return return_value
+
+ @staticmethod
+ def list_get_dict_by_value(input_list, key_name, value, match='first'):
+ """
+ Retrieves a dictionary from a list of dictionaries where _key_name_ has the _value, if _match_ is
+ "first". Else it returns all matching dictionaries.
+
+ *Parameters*:
+ - _input_list_: <list> ; List of dictionaries.
+ - _key_name_: <dictionary> or <list> ; Name of the key to be searched for.
+ - _value_: <string> or <number> ; Any value of key _key_name_ to be searched for.
+
+ *Example*:
+ | ${dict1} | Create Dictionary | key_key=master1 | key1=value11 | key2=value12 | |
+ | ${dict2} | Create Dictionary | key_key=master2 | key1=value21 | key2=value22 | |
+ | ${dict3} | Create Dictionary | key_key=master3 | key1=value31 | key2=value32 | |
+ | ${dict4} | Create Dictionary | key_key=master4 | key5=value41 | key6=value42 | |
+ | ${the_list} | Create List | ${dict1} | ${dict2} | ${dict3} | ${dict4} |
+ | ${result} | List Get Dict By Value | ${the_list} | key_key | master4 | |
+
+ Variable ${result} has following structure:
+ | ${result} = {
+ | 'key_key': 'master4',
+ | 'key5': 'value41',
+ | 'key6': 'value42'
+ | }
+ """
+ try:
+ if match == 'first':
+ return input_list[next(index for (index, d) in enumerate(input_list) if d[key_name] == value)]
+ else:
+ return [d for d in input_list if d[key_name] == value]
+ except (KeyError, TypeError, StopIteration):
+ raise KeyError('list does not contain a dictionary with key:value "%s:%s"' % (key_name, value))
+
+ @staticmethod
+ def to_camel_case(snake_str, first_uppercase=False):
+ components = snake_str.split('_')
+ # We capitalize the first letter of each component except the first one
+ # with the 'title' method and join them together.
+ return (components[0] if not first_uppercase else components[0].title()) + ''.join(x.title() for x in components[1:])
diff --git a/grpc_robot/tools/voltha_tools.py b/grpc_robot/tools/voltha_tools.py
new file mode 100644
index 0000000..ac18c5a
--- /dev/null
+++ b/grpc_robot/tools/voltha_tools.py
@@ -0,0 +1,122 @@
+# Copyright 2020-present Open Networking Foundation
+# Original copyright 2020-present ADTRAN, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from grpc_robot.grpc_robot import _package_version_get
+
+from voltha_protos import events_pb2
+from voltha_protos import tech_profile_pb2
+from grpc_robot.tools.protobuf_to_dict import protobuf_to_dict
+
+
+class VolthaTools(object):
+ """
+ Tools for the voltha, e.g decoding / conversions.
+ """
+
+ try:
+ ROBOT_LIBRARY_VERSION = _package_version_get('grpc_robot')
+ except NameError:
+ ROBOT_LIBRARY_VERSION = 'unknown'
+
+ @staticmethod
+ def _convert_string_to_bytes(string):
+ """Converts a string to a bytes object."""
+ try:
+ return bytes.fromhex(string.replace('\\x', ' '))
+ except:
+ try:
+ b = bytearray()
+ b.extend(map(ord, string))
+ return bytes(b)
+ except (TypeError, AttributeError, SystemError):
+ return string
+
+ def events_decode_event(self, bytestring, return_enum_integer='false', return_defaults='false', human_readable_timestamps='true'):
+ """
+ Converts bytes to an Event as defined in _message Event_ from events.proto
+
+ *Parameters*:
+ - bytestring: <bytes>; Byte string, e.g. as it comes from Kafka messages.
+ - return_enum_integer: <string> or <bool>; Whether or not to return the enum values as integer values rather than their labels. Default: _false_.
+ - return_defaults: <string> or <bool>; Whether or not to return the default values. Default: _false_.
+ - human_readable_timestamps: <string> or <bool>; Whether or not to convert the timestamps to human-readable format. Default: _true_.
+
+ *Return*: A dictionary with _event_ structure.
+
+ *Example*:
+ | Import Library | grpc_robot.VolthaTools | WITH NAME | voltha_tools |
+ | ${kafka_records} | kafka.Records Get |
+ | FOR | ${kafka_record} | IN | @{kafka_records} |
+ | | ${event} | voltha_tools.Events Decode Event | ${kafka_record}[message] |
+ | | Log | ${event} |
+ | END |
+ """
+ return_enum_integer = str(return_enum_integer).lower() == 'true'
+ result = events_pb2.Event.FromString(self._convert_string_to_bytes(bytestring))
+ return protobuf_to_dict(result,
+ use_enum_labels=not return_enum_integer,
+ including_default_value_fields=str(return_defaults).lower() == 'true',
+ human_readable_timestamps=str(human_readable_timestamps).lower() == 'true')
+
+ def tech_profile_decode_resource_instance(self, bytestring, return_enum_integer='false', return_defaults='false', human_readable_timestamps='true'):
+ """
+ Converts bytes to an resource instance as defined in _message ResourceInstance_ from tech_profile.proto
+
+ *Parameters*:
+ - bytestring: <bytes>; Byte string, e.g. as it comes from Kafka messages.
+ - return_enum_integer: <string> or <bool>; Whether or not to return the enum values as integer values rather than their labels. Default: _false_.
+ - return_defaults: <string> or <bool>; Whether or not to return the default values. Default: _false_.
+ - human_readable_timestamps: <string> or <bool>; Whether or not to convert the timestamps to human-readable format. Default: _true_.
+
+ *Return*: A dictionary with _event_ structure.
+
+ *Example*:
+ | Import Library | grpc_robot.VolthaTools | WITH NAME | voltha_tools |
+ | ${kafka_records} | kafka.Records Get |
+ | FOR | ${kafka_record} | IN | @{kafka_records} |
+ | | ${event} | voltha_tools. Tech Profile Decode Resource Instance | ${kafka_record}[message] |
+ | | Log | ${event} |
+ | END |
+ """
+ return_enum_integer = str(return_enum_integer).lower() == 'true'
+ result = tech_profile_pb2.ResourceInstance.FromString(self._convert_string_to_bytes(bytestring))
+ return protobuf_to_dict(result,
+ use_enum_labels=not return_enum_integer,
+ including_default_value_fields=str(return_defaults).lower() == 'true',
+ human_readable_timestamps=str(human_readable_timestamps).lower() == 'true')
+
+
+if __name__ == '__main__':
+    # Ad-hoc smoke test when the module is run directly.
+    # Sample Event payloads, presumably captured from a Kafka topic of a
+    # running voltha stack - TODO confirm origin.
+    messages = [
+        b'\nD\n#Voltha.openolt..1626255789301080436\x10\x02 \x02*\x030.12\x06\x08\xad\xe3\xba\x87\x06:\x0c\x08\xad\xe3\xba\x87\x06\x10\xd9\xc9\xc8\x8f\x01"\xc2\x02\x11\x00\x00@k\xac;\xd8A\x1a\xb6\x02\n\x93\x01\n\x08PONStats\x11\x00\x00@k\xac;\xd8A*$65950aaf-b40f-4697-b5c3-8deb50fedd5d2-\n\x05oltid\x12$65950aaf-b40f-4697-b5c3-8deb50fedd5d2\x15\n\ndevicetype\x12\x07openolt2\x12\n\tportlabel\x12\x05pon-0\x12\x10\n\tTxPackets\x15\x00\x00\x8bC\x12\x15\n\x0eTxMcastPackets\x15\x00\x00\xa6B\x12\x15\n\x0eTxBcastPackets\x15\x00\x00\xa6B\x12\x0e\n\x07RxBytes\x15\x00\x00\x8bF\x12\x10\n\tRxPackets\x15\x00\x00\x8bC\x12\x15\n\x0eRxMcastPackets\x15\x00\x00\xa6B\x12\x15\n\x0eRxBcastPackets\x15\x00\x00\xa6B\x12\x0e\n\x07TxBytes\x15\x00\x00\x8bF',
+        b'\nD\n#Voltha.openolt..1613491472935896440\x10\x02 \x02*\x030.12\x06\x08\xad\xe3\xba\x87\x06:\x0c\x08\xad\xe3\xba\x87\x06\x10\xd9\xc9\xc8\x8f\x01"\xc2\x02\x11\x00\x00@k\xac;\xd8A\x1a\xb6\x02\n\x93\x01\n\x08PONStats\x11\x00\x00@k\xac;\xd8A*$65950aaf-b40f-4697-b5c3-8deb50fedd5d2-\n\x05oltid\x12$65950aaf-b40f-4697-b5c3-8deb50fedd5d2\x15\n\ndevicetype\x12\x07openolt2\x12\n\tportlabel\x12\x05pon-0\x12\x10\n\tTxPackets\x15\x00\x00\x8bC\x12\x15\n\x0eTxMcastPackets\x15\x00\x00\xa6B\x12\x15\n\x0eTxBcastPackets\x15\x00\x00\xa6B\x12\x0e\n\x07RxBytes\x15\x00\x00\x8bF\x12\x10\n\tRxPackets\x15\x00\x00\x8bC\x12\x15\n\x0eRxMcastPackets\x15\x00\x00\xa6B\x12\x15\n\x0eRxBcastPackets\x15\x00\x00\xa6B\x12\x0e\n\x07TxBytes\x15\x00\x00\x8bF'
+    ]
+    for message in messages:
+        print(VolthaTools().events_decode_event(message))
+
+    # Sample ResourceInstance payloads. NOTE(review): the last two entries are
+    # str (not bytes) and so exercise _convert_string_to_bytes' fallback paths;
+    # the decode loop below is commented out, so these are currently unused.
+    messages = [
+        b"\x08\x40\x12\x07\x58\x47\x53\x2d\x50\x4f\x4e\x1a\x42\x6f\x6c\x74\x2d\x7b\x39\x35\x36\x31\x37\x31\x32\x62\x2d\x35\x33\x32\x33\x2d\x34\x64\x64\x63\x2d\x38\x36\x62\x34\x2d\x64\x35\x31\x62\x62\x34\x61\x65\x30\x37\x33\x39\x7d\x2f\x70\x6f\x6e\x2d\x7b\x31\x7d\x2f\x6f\x6e\x75\x2d\x7b\x32\x7d\x2f\x75\x6e\x69\x2d\x7b\x30\x7d\x20\x81\x08\x2a\x10\x88\x08\x89\x08\x8a\x08\x8b\x08\x8c\x08\x8d\x08\x8e\x08\x8f\x08",
+        b"\x08\x40\x12\x07\x58\x47\x53\x2d\x50\x4f\x4e\x1a\x42\x6f\x6c\x74\x2d\x7b\x35\x66\x35\x39\x61\x32\x32\x63\x2d\x37\x63\x37\x65\x2d\x34\x65\x30\x63\x2d\x39\x38\x30\x65\x2d\x37\x34\x66\x31\x35\x33\x62\x33\x32\x33\x38\x31\x7d\x2f\x70\x6f\x6e\x2d\x7b\x30\x7d\x2f\x6f\x6e\x75\x2d\x7b\x31\x7d\x2f\x75\x6e\x69\x2d\x7b\x30\x7d\x20\x81\x08\x2a\x10\x88\x08\x89\x08\x8a\x08\x8b\x08\x8c\x08\x8d\x08\x8e\x08\x8f\x08",
+        b"\x08\x40\x12\x07\x58\x47\x53\x2d\x50\x4f\x4e\x1a\x42\x6f\x6c\x74\x2d\x7b\x35\x66\x35\x39\x61\x32\x32\x63\x2d\x37\x63\x37\x65\x2d\x34\x65\x30\x63\x2d\x39\x38\x30\x65\x2d\x37\x34\x66\x31\x35\x33\x62\x33\x32\x33\x38\x31\x7d\x2f\x70\x6f\x6e\x2d\x7b\x30\x7d\x2f\x6f\x6e\x75\x2d\x7b\x32\x7d\x2f\x75\x6e\x69\x2d\x7b\x30\x7d\x20\x80\x08\x2a\x10\x80\x08\x81\x08\x82\x08\x83\x08\x84\x08\x85\x08\x86\x08\x87\x08",
+        b"\x08\x40\x12\x07\x58\x47\x53\x2d\x50\x4f\x4e\x1a\x42\x6f\x6c\x74\x2d\x7b\x35\x66\x35\x39\x61\x32\x32\x63\x2d\x37\x63\x37\x65\x2d\x34\x65\x30\x63\x2d\x39\x38\x30\x65\x2d\x37\x34\x66\x31\x35\x33\x62\x33\x32\x33\x38\x31\x7d\x2f\x70\x6f\x6e\x2d\x7b\x31\x7d\x2f\x6f\x6e\x75\x2d\x7b\x31\x7d\x2f\x75\x6e\x69\x2d\x7b\x30\x7d\x20\x81\x08\x2a\x10\x88\x08\x89\x08\x8a\x08\x8b\x08\x8c\x08\x8d\x08\x8e\x08\x8f\x08",
+        b"\x08\x40\x12\x07\x58\x47\x53\x2d\x50\x4f\x4e\x1a\x42\x6f\x6c\x74\x2d\x7b\x35\x66\x35\x39\x61\x32\x32\x63\x2d\x37\x63\x37\x65\x2d\x34\x65\x30\x63\x2d\x39\x38\x30\x65\x2d\x37\x34\x66\x31\x35\x33\x62\x33\x32\x33\x38\x31\x7d\x2f\x70\x6f\x6e\x2d\x7b\x31\x7d\x2f\x6f\x6e\x75\x2d\x7b\x32\x7d\x2f\x75\x6e\x69\x2d\x7b\x30\x7d\x20\x80\x08\x2a\x10\x80\x08\x81\x08\x82\x08\x83\x08\x84\x08\x85\x08\x86\x08\x87\x08",
+        b"\x08\x40\x12\x07\x58\x47\x53\x2d\x50\x4f\x4e\x1a\x42\x6f\x6c\x74\x2d\x7b\x38\x34\x62\x35\x64\x35\x61\x39\x2d\x33\x34\x64\x66\x2d\x34\x61\x33\x37\x2d\x62\x66\x37\x64\x2d\x63\x37\x37\x61\x34\x65\x33\x34\x33\x61\x37\x64\x7d\x2f\x70\x6f\x6e\x2d\x7b\x30\x7d\x2f\x6f\x6e\x75\x2d\x7b\x31\x7d\x2f\x75\x6e\x69\x2d\x7b\x30\x7d\x20\x81\x08\x2a\x10\x88\x08\x89\x08\x8a\x08\x8b\x08\x8c\x08\x8d\x08\x8e\x08\x8f\x08",
+        b"\x08\x40\x12\x07\x58\x47\x53\x2d\x50\x4f\x4e\x1a\x42\x6f\x6c\x74\x2d\x7b\x38\x34\x62\x35\x64\x35\x61\x39\x2d\x33\x34\x64\x66\x2d\x34\x61\x33\x37\x2d\x62\x66\x37\x64\x2d\x63\x37\x37\x61\x34\x65\x33\x34\x33\x61\x37\x64\x7d\x2f\x70\x6f\x6e\x2d\x7b\x30\x7d\x2f\x6f\x6e\x75\x2d\x7b\x32\x7d\x2f\x75\x6e\x69\x2d\x7b\x30\x7d\x20\x80\x08\x2a\x10\x80\x08\x81\x08\x82\x08\x83\x08\x84\x08\x85\x08\x86\x08\x87\x08",
+        b"\x08\x40\x12\x07\x58\x47\x53\x2d\x50\x4f\x4e\x1a\x42\x6f\x6c\x74\x2d\x7b\x38\x34\x62\x35\x64\x35\x61\x39\x2d\x33\x34\x64\x66\x2d\x34\x61\x33\x37\x2d\x62\x66\x37\x64\x2d\x63\x37\x37\x61\x34\x65\x33\x34\x33\x61\x37\x64\x7d\x2f\x70\x6f\x6e\x2d\x7b\x31\x7d\x2f\x6f\x6e\x75\x2d\x7b\x31\x7d\x2f\x75\x6e\x69\x2d\x7b\x30\x7d\x20\x80\x08\x2a\x10\x80\x08\x81\x08\x82\x08\x83\x08\x84\x08\x85\x08\x86\x08\x87\x08",
+        b"\x08\x40\x12\x07\x58\x47\x53\x2d\x50\x4f\x4e\x1a\x42\x6f\x6c\x74\x2d\x7b\x38\x34\x62\x35\x64\x35\x61\x39\x2d\x33\x34\x64\x66\x2d\x34\x61\x33\x37\x2d\x62\x66\x37\x64\x2d\x63\x37\x37\x61\x34\x65\x33\x34\x33\x61\x37\x64\x7d\x2f\x70\x6f\x6e\x2d\x7b\x31\x7d\x2f\x6f\x6e\x75\x2d\x7b\x32\x7d\x2f\x75\x6e\x69\x2d\x7b\x30\x7d\x20\x81\x08\x2a\x10\x88\x08\x89\x08\x8a\x08\x8b\x08\x8c\x08\x8d\x08\x8e\x08\x8f\x08",
+        b"\x08\x40\x12\x07\x58\x47\x53\x2d\x50\x4f\x4e\x1a\x42\x6f\x6c\x74\x2d\x7b\x61\x61\x34\x36\x63\x62\x63\x61\x2d\x39\x31\x64\x37\x2d\x34\x36\x64\x65\x2d\x61\x61\x30\x65\x2d\x61\x32\x65\x33\x64\x32\x36\x61\x61\x66\x36\x32\x7d\x2f\x70\x6f\x6e\x2d\x7b\x30\x7d\x2f\x6f\x6e\x75\x2d\x7b\x31\x7d\x2f\x75\x6e\x69\x2d\x7b\x30\x7d\x20\x80\x08\x2a\x10\x80\x08\x81\x08\x82\x08\x83\x08\x84\x08\x85\x08\x86\x08\x87\x08",
+        "\x08\x40\x12\x07\x58\x47\x53\x2d\x50\x4f\x4e\x1a\x42\x6f\x6c\x74\x2d\x7b\x61\x61\x34\x36\x63\x62\x63\x61\x2d\x39\x31\x64\x37\x2d\x34\x36\x64\x65\x2d\x61\x61\x30\x65\x2d\x61\x32\x65\x33\x64\x32\x36\x61\x61\x66\x36\x32\x7d\x2f\x70\x6f\x6e\x2d\x7b\x30\x7d\x2f\x6f\x6e\x75\x2d\x7b\x31\x7d\x2f\x75\x6e\x69\x2d\x7b\x30\x7d\x20\x80\x08\x2a\x10\x80\x08\x81\x08\x82\x08\x83\x08\x84\x08\x85\x08\x86\x08\x87\x08",
+        "\\x08\\x40\\x12\\x07\\x58\\x47\\x53\\x2d\\x50\\x4f\\x4e\\x1a\\x42\\x6f\\x6c\\x74\\x2d\\x7b\\x61\\x61\\x34\\x36\\x63\\x62\\x63\\x61\\x2d\\x39\\x31\\x64\\x37\\x2d\\x34\\x36\\x64\\x65\\x2d\\x61\\x61\\x30\\x65\\x2d\\x61\\x32\\x65\\x33\\x64\\x32\\x36\\x61\\x61\\x66\\x36\\x32\\x7d\\x2f\\x70\\x6f\\x6e\\x2d\\x7b\\x30\\x7d\\x2f\\x6f\\x6e\\x75\\x2d\\x7b\\x31\\x7d\\x2f\\x75\\x6e\\x69\\x2d\\x7b\\x30\\x7d\\x20\\x80\\x08\\x2a\\x10\\x80\\x08\\x81\\x08\\x82\\x08\\x83\\x08\\x84\\x08\\x85\\x08\\x86\\x08\\x87\\x08"
+    ]
+    # for message in messages:
+    # print(VolthaTools().tech_profile_decode_resource_instance(message))