From 2a884116ef8e9827667983a65cc10ae3745e826d Mon Sep 17 00:00:00 2001 From: Justin Brzozoski Date: Tue, 1 Mar 2022 22:32:22 -0500 Subject: [PATCH 1/3] python: major rewrite with more full-featured edge node examples Includes: - packaging config using setuptools - regenerated the pb2 files with the current protobuf definition and libprotoc 3.6.1 - convert all tabs to spaces - beautify all code according to PEP8 - updated examples to match new usage --- python/.gitignore | 2 + python/README.md | 32 + python/core/.gitignore | 1 + python/core/__init__.py | 0 python/core/host_session_establishment.py | 97 --- python/core/readme.txt | 2 - python/core/sparkplug_b.py | 342 -------- python/core/tahu/__init__.py | 498 ++++++++++++ python/core/tahu/edge.py | 950 ++++++++++++++++++++++ python/core/{ => tahu}/sparkplug_b_pb2.py | 375 ++++++--- python/examples/example.py | 283 ------- python/examples/example_raspberry_pi.py | 247 ------ python/examples/example_simple.py | 269 ------ python/examples/fuller_example.py | 212 +++++ python/examples/pibrella_example.py | 175 ++++ python/examples/simple_example.py | 86 ++ python/pyproject.toml | 7 + python/setup.cfg | 26 + 18 files changed, 2272 insertions(+), 1332 deletions(-) create mode 100644 python/.gitignore create mode 100644 python/README.md create mode 100644 python/core/.gitignore delete mode 100644 python/core/__init__.py delete mode 100644 python/core/host_session_establishment.py delete mode 100644 python/core/readme.txt delete mode 100644 python/core/sparkplug_b.py create mode 100644 python/core/tahu/__init__.py create mode 100644 python/core/tahu/edge.py rename python/core/{ => tahu}/sparkplug_b_pb2.py (83%) delete mode 100755 python/examples/example.py delete mode 100755 python/examples/example_raspberry_pi.py delete mode 100755 python/examples/example_simple.py create mode 100755 python/examples/fuller_example.py create mode 100755 python/examples/pibrella_example.py create mode 100755 
python/examples/simple_example.py create mode 100644 python/pyproject.toml create mode 100644 python/setup.cfg diff --git a/python/.gitignore b/python/.gitignore new file mode 100644 index 00000000..9d0b71a3 --- /dev/null +++ b/python/.gitignore @@ -0,0 +1,2 @@ +build +dist diff --git a/python/README.md b/python/README.md new file mode 100644 index 00000000..35d610c9 --- /dev/null +++ b/python/README.md @@ -0,0 +1,32 @@ +Add in "function annotations" or use "typing" module? + +https://docs.python.org/3/tutorial/modules.html + +# Python specific hints and tips + +Basic instructions used to setup Python packaging found here: https://packaging.python.org/en/latest/tutorials/packaging-projects/ + +## Building PIP package + +From in the `python` directory: + +``` +python3 -m build +``` + +## Install PIP package locally for testing + +From in the `python` directory: + +``` +python3 -m pip install . +``` + +## Recompiling protobuf definition + +From the top directory of tahu: + +``` +protoc -I=sparkplug_b --python_out=python/core/tahu sparkplug_b.proto +``` + diff --git a/python/core/.gitignore b/python/core/.gitignore new file mode 100644 index 00000000..11041c78 --- /dev/null +++ b/python/core/.gitignore @@ -0,0 +1 @@ +*.egg-info diff --git a/python/core/__init__.py b/python/core/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/python/core/host_session_establishment.py b/python/core/host_session_establishment.py deleted file mode 100644 index 2f6c891b..00000000 --- a/python/core/host_session_establishment.py +++ /dev/null @@ -1,97 +0,0 @@ -"""******************************************************************************* - * Copyright (c) 2021 Ian Craggs - * - * All rights reserved. This program and the accompanying materials - * are made available under the terms of the Eclipse Public License v2.0 - * and Eclipse Distribution License v1.0 which accompany this distribution. 
- * - * The Eclipse Public License is available at - * https://www.eclipse.org/legal/epl-2.0/ - * and the Eclipse Distribution License is available at - * http://www.eclipse.org/org/documents/edl-v10.php. - * - * Contributors: - * Ian Craggs - initial API and implementation and/or initial documentation - *******************************************************************************""" - - -import paho.mqtt.client as mqtt -import time - -""" - - -""" -broker = "localhost" -port = 1883 -host_application_id = "HOSTAPPID" - -def control_on_message(client, userdata, msg): - if msg.topic == "SPARKPLUG_TCK/RESULT": - print("*** Result ***", msg.payload) - -def control_on_connect(client, userdata, flags, rc): - print("Control client connected with result code "+str(rc)) - # Subscribing in on_connect() means that if we lose the connection and - # reconnect then subscriptions will be renewed. - client.subscribe("SPARKPLUG_TCK/#") - -def control_on_subscribe(client, userdata, mid, granted_qos): - print("Control client subscribed") - rc = client.publish("SPARKPLUG_TCK/TEST_CONTROL", "NEW host SessionEstablishment " + host_application_id, qos=1) - -published = False -def control_on_publish(client, userdata, mid): - print("Control client published") - global published - published = True - -control_client = mqtt.Client("sparkplug_control") -control_client.on_connect = control_on_connect -control_client.on_subscribe = control_on_subscribe -control_client.on_publish = control_on_publish -control_client.on_message = control_on_message -control_client.connect(broker, port) -control_client.loop_start() - -# wait for publish to complete -while published == False: - time.sleep(0.1) - -def test_on_connect(client, userdata, flags, rc): - print("Test client connected with result code "+str(rc)) - client.subscribe("spAv1.0/#") - -def test_on_subscribe(client, userdata, mid, granted_qos): - print("Test client subscribed") - client.publish("STATE/"+host_application_id, "ONLINE", qos=1) - 
-published = False -def test_on_publish(client, userdata, mid): - print("Test client published") - global published - published = True - -client = mqtt.Client("clientid", clean_session=True) -client.on_connect = test_on_connect -client.on_subscribe = test_on_subscribe -client.on_publish = test_on_publish -client.will_set(topic="STATE/"+host_application_id, payload="OFFLINE", qos=1, retain=True) -client.connect(broker, port) -client.loop_start() - -while published == False: - time.sleep(0.1) - -client.loop_stop() - -published = False -control_client.publish("SPARKPLUG_TCK/TEST_CONTROL", "END TEST") -while published == False: - time.sleep(0.1) - -control_client.loop_stop() - - - - diff --git a/python/core/readme.txt b/python/core/readme.txt deleted file mode 100644 index 493df47d..00000000 --- a/python/core/readme.txt +++ /dev/null @@ -1,2 +0,0 @@ -# To generate the base protobuf sparkplug_b Python library -protoc -I=../../sparkplug_b/ --python_out=. ../../sparkplug_b/sparkplug_b.proto diff --git a/python/core/sparkplug_b.py b/python/core/sparkplug_b.py deleted file mode 100644 index 00225c67..00000000 --- a/python/core/sparkplug_b.py +++ /dev/null @@ -1,342 +0,0 @@ -#/******************************************************************************** -# * Copyright (c) 2014, 2018 Cirrus Link Solutions and others -# * -# * This program and the accompanying materials are made available under the -# * terms of the Eclipse Public License 2.0 which is available at -# * http://www.eclipse.org/legal/epl-2.0. 
-# * -# * SPDX-License-Identifier: EPL-2.0 -# * -# * Contributors: -# * Cirrus Link Solutions - initial implementation -# ********************************************************************************/ -import sparkplug_b_pb2 -import time -from sparkplug_b_pb2 import Payload - -seqNum = 0 -bdSeq = 0 - -class DataSetDataType: - Unknown = 0 - Int8 = 1 - Int16 = 2 - Int32 = 3 - Int64 = 4 - UInt8 = 5 - UInt16 = 6 - UInt32 = 7 - UInt64 = 8 - Float = 9 - Double = 10 - Boolean = 11 - String = 12 - DateTime = 13 - Text = 14 - -class MetricDataType: - Unknown = 0 - Int8 = 1 - Int16 = 2 - Int32 = 3 - Int64 = 4 - UInt8 = 5 - UInt16 = 6 - UInt32 = 7 - UInt64 = 8 - Float = 9 - Double = 10 - Boolean = 11 - String = 12 - DateTime = 13 - Text = 14 - UUID = 15 - DataSet = 16 - Bytes = 17 - File = 18 - Template = 19 - -class ParameterDataType: - Unknown = 0 - Int8 = 1 - Int16 = 2 - Int32 = 3 - Int64 = 4 - UInt8 = 5 - UInt16 = 6 - UInt32 = 7 - UInt64 = 8 - Float = 9 - Double = 10 - Boolean = 11 - String = 12 - DateTime = 13 - Text = 14 - -class ParameterDataType: - Unknown = 0 - Int8 = 1 - Int16 = 2 - Int32 = 3 - Int64 = 4 - UInt8 = 5 - UInt16 = 6 - UInt32 = 7 - UInt64 = 8 - Float = 9 - Double = 10 - Boolean = 11 - String = 12 - DateTime = 13 - Text = 14 - -###################################################################### -# Always request this before requesting the Node Birth Payload -###################################################################### -def getNodeDeathPayload(): - payload = sparkplug_b_pb2.Payload() - addMetric(payload, "bdSeq", None, MetricDataType.Int64, getBdSeqNum()) - return payload -###################################################################### - -###################################################################### -# Always request this after requesting the Node Death Payload -###################################################################### -def getNodeBirthPayload(): - global seqNum - seqNum = 0 - payload = sparkplug_b_pb2.Payload() 
- payload.timestamp = int(round(time.time() * 1000)) - payload.seq = getSeqNum() - addMetric(payload, "bdSeq", None, MetricDataType.Int64, --bdSeq) - return payload -###################################################################### - -###################################################################### -# Get the DBIRTH payload -###################################################################### -def getDeviceBirthPayload(): - payload = sparkplug_b_pb2.Payload() - payload.timestamp = int(round(time.time() * 1000)) - payload.seq = getSeqNum() - return payload -###################################################################### - -###################################################################### -# Get a DDATA payload -###################################################################### -def getDdataPayload(): - return getDeviceBirthPayload() -###################################################################### - -###################################################################### -# Helper method for adding dataset metrics to a payload -###################################################################### -def initDatasetMetric(payload, name, alias, columns, types): - metric = payload.metrics.add() - if name is not None: - metric.name = name - if alias is not None: - metric.alias = alias - metric.timestamp = int(round(time.time() * 1000)) - metric.datatype = MetricDataType.DataSet - - # Set up the dataset - metric.dataset_value.num_of_columns = len(types) - metric.dataset_value.columns.extend(columns) - metric.dataset_value.types.extend(types) - return metric.dataset_value -###################################################################### - -###################################################################### -# Helper method for adding dataset metrics to a payload -###################################################################### -def initTemplateMetric(payload, name, alias, templateRef): - metric = payload.metrics.add() - if 
name is not None: - metric.name = name - if alias is not None: - metric.alias = alias - metric.timestamp = int(round(time.time() * 1000)) - metric.datatype = MetricDataType.Template - - # Set up the template - if templateRef is not None: - metric.template_value.template_ref = templateRef - metric.template_value.is_definition = False - else: - metric.template_value.is_definition = True - - return metric.template_value -###################################################################### - -###################################################################### -# Helper method for adding metrics to a container which can be a -# payload or a template -###################################################################### -def addMetric(container, name, alias, type, value): - metric = container.metrics.add() - if name is not None: - metric.name = name - if alias is not None: - metric.alias = alias - metric.timestamp = int(round(time.time() * 1000)) - - # print( "Type: " + str(type)) - - if type == MetricDataType.Int8: - metric.datatype = MetricDataType.Int8 - metric.int_value = value - elif type == MetricDataType.Int16: - metric.datatype = MetricDataType.Int16 - metric.int_value = value - elif type == MetricDataType.Int32: - metric.datatype = MetricDataType.Int32 - metric.int_value = value - elif type == MetricDataType.Int64: - metric.datatype = MetricDataType.Int64 - metric.long_value = value - elif type == MetricDataType.UInt8: - metric.datatype = MetricDataType.UInt8 - metric.int_value = value - elif type == MetricDataType.UInt16: - metric.datatype = MetricDataType.UInt16 - metric.int_value = value - elif type == MetricDataType.UInt32: - metric.datatype = MetricDataType.UInt32 - metric.int_value = value - elif type == MetricDataType.UInt64: - metric.datatype = MetricDataType.UInt64 - metric.long_value = value - elif type == MetricDataType.Float: - metric.datatype = MetricDataType.Float - metric.float_value = value - elif type == MetricDataType.Double: - 
metric.datatype = MetricDataType.Double - metric.double_value = value - elif type == MetricDataType.Boolean: - metric.datatype = MetricDataType.Boolean - metric.boolean_value = value - elif type == MetricDataType.String: - metric.datatype = MetricDataType.String - metric.string_value = value - elif type == MetricDataType.DateTime: - metric.datatype = MetricDataType.DateTime - metric.long_value = value - elif type == MetricDataType.Text: - metric.datatype = MetricDataType.Text - metric.string_value = value - elif type == MetricDataType.UUID: - metric.datatype = MetricDataType.UUID - metric.string_value = value - elif type == MetricDataType.Bytes: - metric.datatype = MetricDataType.Bytes - metric.bytes_value = value - elif type == MetricDataType.File: - metric.datatype = MetricDataType.File - metric.bytes_value = value - elif type == MetricDataType.Template: - metric.datatype = MetricDataType.Template - metric.template_value = value - else: - print( "Invalid: " + str(type)) - - # Return the metric - return metric -###################################################################### - -###################################################################### -# Helper method for adding metrics to a container which can be a -# payload or a template -###################################################################### -def addHistoricalMetric(container, name, alias, type, value): - metric = addMetric(container, name, alias, type, value) - metric.is_historical = True - - # Return the metric - return metric -###################################################################### - -###################################################################### -# Helper method for adding metrics to a container which can be a -# payload or a template -###################################################################### -def addNullMetric(container, name, alias, type): - metric = container.metrics.add() - if name is not None: - metric.name = name - if alias is not None: - 
metric.alias = alias - metric.timestamp = int(round(time.time() * 1000)) - metric.is_null = True - - # print( "Type: " + str(type)) - - if type == MetricDataType.Int8: - metric.datatype = MetricDataType.Int8 - elif type == MetricDataType.Int16: - metric.datatype = MetricDataType.Int16 - elif type == MetricDataType.Int32: - metric.datatype = MetricDataType.Int32 - elif type == MetricDataType.Int64: - metric.datatype = MetricDataType.Int64 - elif type == MetricDataType.UInt8: - metric.datatype = MetricDataType.UInt8 - elif type == MetricDataType.UInt16: - metric.datatype = MetricDataType.UInt16 - elif type == MetricDataType.UInt32: - metric.datatype = MetricDataType.UInt32 - elif type == MetricDataType.UInt64: - metric.datatype = MetricDataType.UInt64 - elif type == MetricDataType.Float: - metric.datatype = MetricDataType.Float - elif type == MetricDataType.Double: - metric.datatype = MetricDataType.Double - elif type == MetricDataType.Boolean: - metric.datatype = MetricDataType.Boolean - elif type == MetricDataType.String: - metric.datatype = MetricDataType.String - elif type == MetricDataType.DateTime: - metric.datatype = MetricDataType.DateTime - elif type == MetricDataType.Text: - metric.datatype = MetricDataType.Text - elif type == MetricDataType.UUID: - metric.datatype = MetricDataType.UUID - elif type == MetricDataType.Bytes: - metric.datatype = MetricDataType.Bytes - elif type == MetricDataType.File: - metric.datatype = MetricDataType.File - elif type == MetricDataType.Template: - metric.datatype = MetricDataType.Template - else: - print( "Invalid: " + str(type)) - - # Return the metric - return metric -###################################################################### - -###################################################################### -# Helper method for getting the next sequence number -###################################################################### -def getSeqNum(): - global seqNum - retVal = seqNum - # print("seqNum: " + str(retVal)) - 
seqNum += 1 - if seqNum == 256: - seqNum = 0 - return retVal -###################################################################### - -###################################################################### -# Helper method for getting the next birth/death sequence number -###################################################################### -def getBdSeqNum(): - global bdSeq - retVal = bdSeq - # print("bdSeqNum: " + str(retVal)) - bdSeq += 1 - if bdSeq == 256: - bdSeq = 0 - return retVal -###################################################################### diff --git a/python/core/tahu/__init__.py b/python/core/tahu/__init__.py new file mode 100644 index 00000000..a9088cfe --- /dev/null +++ b/python/core/tahu/__init__.py @@ -0,0 +1,498 @@ +""" +Core SparkplugB/MQTT library from Eclipse +""" + +#/******************************************************************************** +# * Copyright (c) 2014, 2018, 2020, 2022 Cirrus Link Solutions and others +# * +# * This program and the accompanying materials are made available under the +# * terms of the Eclipse Public License 2.0 which is available at +# * http://www.eclipse.org/legal/epl-2.0. +# * +# * SPDX-License-Identifier: EPL-2.0 +# * +# * Contributors: +# * Cirrus Link Solutions - initial implementation +# * Justin Brzozoski @ SignalFire Wireless Telemetry - major rewrite +# ********************************************************************************/ + +import time +import enum +from . 
import sparkplug_b_pb2 + +class SparkplugDecodeError(ValueError): + """Exception type for all errors related to decoding SparkplugB payloads""" + pass + + +class DataType(enum.IntEnum): + """Enumeration of all SparkplugB datatype values""" + Unknown = sparkplug_b_pb2.Unknown + Int8 = sparkplug_b_pb2.Int8 + Int16 = sparkplug_b_pb2.Int16 + Int32 = sparkplug_b_pb2.Int32 + Int64 = sparkplug_b_pb2.Int64 + UInt8 = sparkplug_b_pb2.UInt8 + UInt16 = sparkplug_b_pb2.UInt16 + UInt32 = sparkplug_b_pb2.UInt32 + UInt64 = sparkplug_b_pb2.UInt64 + Float = sparkplug_b_pb2.Float + Double = sparkplug_b_pb2.Double + Boolean = sparkplug_b_pb2.Boolean + String = sparkplug_b_pb2.String + DateTime = sparkplug_b_pb2.DateTime + Text = sparkplug_b_pb2.Text + UUID = sparkplug_b_pb2.UUID + DataSet = sparkplug_b_pb2.DataSet + Bytes = sparkplug_b_pb2.Bytes + File = sparkplug_b_pb2.File + Template = sparkplug_b_pb2.Template + PropertySet = sparkplug_b_pb2.PropertySet + PropertySetList = sparkplug_b_pb2.PropertySetList + + +def _get_type_from_datatype(datatype): + """Return the best Python type to handle a SparkplugB DataType if one exists, None otherwise""" + # TODO - Figure out the best way to handle the complex types in this list. + # For now, they are commented out to indicate there is no native Python type. 
+ PYTHON_TYPE_PER_DATATYPE = { + #DataType.Unknown : None, + DataType.Int8: int, + DataType.Int16: int, + DataType.Int32: int, + DataType.Int64: int, + DataType.UInt8: int, + DataType.UInt16: int, + DataType.UInt32: int, + DataType.UInt64: int, + DataType.Float: float, + DataType.Double: float, + DataType.Boolean: bool, + DataType.String: str, + DataType.DateTime: int, + DataType.Text: str, + DataType.UUID: str, + #DataType.DataSet : DataSet, + DataType.Bytes: bytes, + DataType.File: bytes, + #DataType.Template : lambda x : x, + #DataType.PropertySet : lambda x : x, + #DataType.PropertySetList : lambda x : x, + } + return PYTHON_TYPE_PER_DATATYPE.get(datatype, None) + +def _get_datatype_from_type(pytype): + """Return the best SparkplugB DataType type to handle a Python type if one exists, None otherwise""" + DATATYPE_PER_PYTHON_TYPE = { + int: DataType.Int64, + float: DataType.Double, + bool: DataType.Boolean, + str: DataType.String, + bytes: DataType.Bytes, + } + return DATATYPE_PER_PYTHON_TYPE.get(pytype, None) + +def _get_usable_value_fields_for_datatype(datatype): + """Return a set of "oneof value" field names that we are willing to read a value from for a given SparkplugB DataType""" + # NOTE: This is not normative by spec, but is useful when talking to an imperfect + # implementation on the other side. It lists for each expected datatype + # which value fields we will try and read from without complaint when + # we receive a payload. 
+ CONVERTIBLE_VALUE_FIELD_PER_DATATYPE = { + #DataType.Unknown: set(), + DataType.Int8: set(('int_value', 'long_value', 'boolean_value')), + DataType.Int16: set(('int_value', 'long_value', 'boolean_value')), + DataType.Int32: set(('int_value', 'long_value', 'boolean_value')), + DataType.Int64: set(('int_value', 'long_value', 'boolean_value')), + DataType.UInt8: set(('int_value', 'long_value', 'boolean_value')), + DataType.UInt16: set(('int_value', 'long_value', 'boolean_value')), + DataType.UInt32: set(('int_value', 'long_value', 'boolean_value')), + DataType.UInt64: set(('int_value', 'long_value', 'boolean_value')), + DataType.Float: set(('float_value', 'double_value')), + DataType.Double: set(('float_value', 'double_value')), + DataType.Boolean: set(('int_value', 'long_value', 'boolean_value')), + DataType.String: set(('string_value')), + DataType.DateTime: set(('long_value')), + DataType.Text: set(('string_value')), + DataType.UUID: set(('string_value')), + DataType.DataSet: set(('dataset_value')), + DataType.Bytes: set(('bytes_value')), + DataType.File: set(('bytes_value')), + DataType.Template: set(('template_value')), + } + return CONVERTIBLE_VALUE_FIELD_PER_DATATYPE.get(datatype, set()) + +def _is_int_datatype(datatype): + """Return whether SparkplugB DataType is an integer type""" + return (datatype in (DataType.Int8, DataType.UInt8, DataType.Int16, + DataType.UInt16, DataType.Int32, DataType.UInt32, + DataType.Int64, DataType.UInt64)) + +def _get_min_max_limits_per_int_datatype(datatype): + """Return a tuple with "allowable" (min, max) range for a given integer SparkplugB DataType""" + # I could not find these constant limits in Python ... + # It's not in ctypes or anywhere else AFAIK! 
+ MIN_MAX_LIMITS_PER_INTEGER_DATATYPE = { + DataType.Int8: (-128, 127), + DataType.UInt8: (0, 255), + DataType.Int16: (-32768, 32767), + DataType.UInt16: (0, 65535), + DataType.Int32: (-2147483648, 2147483647), + DataType.UInt32: (0, 4294967295), + DataType.Int64: (-9223372036854775808, 9223372036854775807), + DataType.UInt64: (0, 18446744073709551615), + } + return MIN_MAX_LIMITS_PER_INTEGER_DATATYPE[datatype] + +def timestamp_to_sparkplug(utc_seconds=None): + """ + Convert a timestamp to SparkplugB DateTime value + + If called without a parameter, uses the current system time. + + To convert a Python datetime object, pass in the datetime.timestamp like this: + + sample_datetime = datetime(2006, 11, 21, 16, 30, tzinfo=timezone.utc) + sparkplug_b.timestamp_to_sparkplug(sample_datetime.timestamp()) + + :param utc_seconds: seconds since Unix epoch UTC (optional, default=current time) + + """ + if utc_seconds is None: + utc_seconds = time.clock_gettime(time.CLOCK_REALTIME) + return int(utc_seconds * 1000) + +def timestamp_from_sparkplug(sparkplug_time): + """ + Convert a SparkplugB DateTime value to a timestamp + + To convert back to a Python datetime object, use the output with datetime.fromtimestamp like this: + + datetime.fromtimestamp(sparkplug_b.timestamp_from_sparkplug(value), timezone.utc) + + :param sparkplug_time: SparkplugB DateTime value + + """ + return (float(sparkplug_time) / 1000.0) + +def value_to_sparkplug(container, datatype, value, u32_in_long=False): + """ + Help pass a value into a payload container in preparation of protobuf packing + + Several structure types in the SparkplugB protobuf definition contain "oneof value" structs within them. + This function helps pass a value into one of those structures, using the correct oneof sub-field and data conversion or casting rules, based on the other parameters. 
+ + Will raise ValueError if the datatype requested cannot be handled + + :param container: a Sparkplug Payload.Template.Parameter, Payload.DataSet.DataSetValue, Payload.PropertyValue, or Payload.Metric message object to fill in + :param datatype: the Sparkplug DataType of the value + :param value: the value to store + :param u32_in_long: whether to put UInt32 DataType in long_value or int_value (Default value = False) + """ + # The Sparkplug B protobuf schema doesn't make use of signed ints. + # We have to do byte-casting because of this when handling anything signed. + # Tests well against Ignition 8.1.1 with u32_in_long=True + # TODO - Add is_null support if value is None + # TODO - Should we clamp any outgoing values larger than the datatype supports? + if u32_in_long and datatype == DataType.UInt32: + container.long_value = value + elif datatype in [DataType.Int8, DataType.Int16, DataType.Int32]: + bytes = int(value).to_bytes(4, 'big', signed=True) + container.int_value = int().from_bytes(bytes, 'big', signed=False) + elif datatype == DataType.Int64: + bytes = int(value).to_bytes(8, 'big', signed=True) + container.long_value = int().from_bytes(bytes, 'big', signed=False) + elif datatype in [DataType.UInt8, DataType.UInt16, DataType.UInt32]: + container.int_value = value + elif datatype in [DataType.UInt64, DataType.DateTime]: + container.long_value = value + elif datatype == DataType.Float: + container.float_value = value + elif datatype == DataType.Double: + container.double_value = value + elif datatype == DataType.Boolean: + container.boolean_value = value + elif datatype in [DataType.String, DataType.Text, DataType.UUID]: + container.string_value = value + elif datatype in [DataType.Bytes, DataType.File]: + container.bytes_value = value + elif datatype == DataType.Template: + value.to_sparkplug_template(container.template_value, u32_in_long) + elif datatype == DataType.DataSet: + value.to_sparkplug_dataset(container.dataset_value, u32_in_long) + else: + 
raise ValueError('Unhandled datatype={} in value_to_sparkplug'.format(datatype)) + +def value_from_sparkplug(container, datatype): + """ + Help read a value out of a payload container after protobuf unpacking + + Several structure types in the SparkplugB protobuf definition contain "oneof value" structs within them. + This function helps read a value from one of those structures, using the correct oneof sub-field and data conversion or casting rules, based on the other parameters. + + Returns the value in an appropriate Python datatype + + Will raise SparkplugDecodeError if the "oneof value" portion of the payload is not setup properly or the datatype cannot be handled. + + :param container: a Sparkplug Payload.Template.Parameter, Payload.DataSet.DataSetValue, Payload.PropertyValue, or Payload.Metric message object to read from + :param datatype: the Sparkplug DataType of the value + + """ + # The Sparkplug B protobuf schema doesn't make use of signed ints. + # We have to do byte-casting because of this when handling anything signed. + # We try to be flexible when handling incoming values because there are some bad + # implementations out there that might use the wrong value field. + # We clamp values on any incoming integers larger than the datatype supports. 
+ # Tests well against Ignition 8.1.1 + try: + has_null = container.HasField('is_null') + except ValueError: + has_null = False + if has_null and container.is_null: + return None + value_field = container.WhichOneof('value') + if value_field is None: + raise SparkplugDecodeError('No value field present') + if value_field not in _get_usable_value_fields_for_datatype(datatype): + raise SparkplugDecodeError('Unexpected value field {} for datatype {}'.format(value_field, + datatype)) + value = getattr(container, value_field) + if _is_int_datatype(datatype): + value_min, value_max = _get_min_max_limits_per_int_datatype(datatype) + if value_min < 0: + # If we're expecting a signed value, we need to cast if reading from int_value or long_value + # since they are unsigned in the protobuf + if value_field == 'int_value': + bytes = value.to_bytes(4, 'big', signed=False) + value = int().from_bytes(bytes, 'big', signed=True) + elif value_field == 'long_value': + bytes = value.to_bytes(8, 'big', signed=False) + value = int().from_bytes(bytes, 'big', signed=True) + # Now we clamp them to the limits + if value < value_min: + value = value_min + elif value > value_max: + value = value_max + if datatype == DataType.Template: + return Template.from_sparkplug_template(value) + if datatype == DataType.DataSet: + return DataSet.from_sparkplug_dataset(value) + pytype = _get_type_from_datatype(datatype) + if pytype is not None: + return pytype(value) + raise SparkplugDecodeError('Unhandled datatype={} in value_from_sparkplug'.format(datatype)) + +def mqtt_params(server, port=None, + username=None, password=None, + client_id=None, keepalive=60, + tls_enabled=False, ca_certs=None, certfile=None, + keyfile=None): + """ + Collect all setup parameters for a single MQTT connection into a object to be used when initializing a Node + + Most of these parameters are simply passed to the relevant Paho MQTT API. + See https://pypi.org/project/paho-mqtt/ for more explanation. 
class DataSet(object):
    """DataSet: object for working with SparkplugB dataset values"""

    # TODO - Add methods to allow easy value access by indices, e.g.
    # with DataSet D you could just reference D[0][0] or D[0][column_name]

    def __init__(self, name_datatype_tuples):
        """
        Initialize a DataSet object

        :param name_datatype_tuples: dict mapping column names (keys) to
            Sparkplug DataTypes (values); column order follows dict order
        :raises ValueError: if no columns are provided
        """
        self._num_columns = len(name_datatype_tuples)
        if self._num_columns == 0:
            raise ValueError('dataset must have at least one column')
        self._column_names = [str(n) for n in name_datatype_tuples.keys()]
        self._column_datatypes = [DataType(d) for d in name_datatype_tuples.values()]
        self._data = []

    def add_rows(self, data, keyed=False, in_columns=False,
                 insert_index=None):
        """
        Add rows to an existing DataSet object

        Takes in data in one of a few formats, and an optional int row index
        specifying where to insert the new rows.

        Modifies the (mutable) dataset object being operated on. Returns nothing.

        :param data: new data rows to add to the dataset
        :param keyed: whether to locate the columns via dict keys (True), or via ordered list index (False, default)
        :param in_columns: whether data goes down each column first (True), or across each row first (False, default)
        :param insert_index: which row index of the original dataset to insert the new data before; 0 means insert at beginning, default (None) means add to end

        For example, with three columns named 'A', 'B', 'C' and data rows that
        would look like this in a tabular layout:

            A B C
            1 2 3
            4 5 6
            7 8 9

        here are the different ways you could pass that in:

            keyed=False in_columns=False data=[[1,2,3],[4,5,6],[7,8,9]]
            keyed=True  in_columns=False data=[{'A':1, 'B':2, 'C':3},{'A':4, 'B':5, 'C':6},{'A':7, 'B':8, 'C':9}]
            keyed=False in_columns=True  data=[[1,4,7],[2,5,8],[3,6,9]]
            keyed=True  in_columns=True  data={'A':[1,4,7], 'B':[2,5,8], 'C':[3,6,9]}
        """
        if not data:
            return
        # BUGFIX: was self._columns_names, which raised AttributeError
        # whenever keyed=True was used.
        col_keys = self._column_names if keyed else range(self._num_columns)
        col_python_types = [_get_type_from_datatype(d) for d in self._column_datatypes]
        col_helper = tuple(zip(col_keys, col_python_types))
        new_data = []
        if not in_columns:
            for row in data:
                new_data.append([t(row[k]) for k, t in col_helper])
        else:
            num_rows = len(data[col_keys[0]])
            for k in col_keys[1:]:
                if len(data[k]) != num_rows:
                    raise ValueError('data does not have {} rows in all columns'.format(num_rows))
            for row_index in range(num_rows):
                new_data.append([t(data[k][row_index]) for k, t in col_helper])
        # BUGFIX: compare against None so that insert_index=0 inserts at the
        # beginning (as documented) instead of appending to the end.
        if insert_index is not None:
            # Assigning to a zero-length slice inserts the new rows at that
            # index without replacing any existing rows.
            self._data[insert_index:insert_index] = new_data
        else:
            self._data.extend(new_data)

    def get_num_columns(self):
        """Return the number of columns in the DataSet"""
        return self._num_columns

    def get_num_rows(self):
        """Return the number of rows in the DataSet"""
        return len(self._data)

    def remove_rows(self, start_index=0, end_index=None, num_rows=None):
        """
        Remove a contiguous set of rows from the DataSet

        :param start_index: first row to remove (optional, default=0)
        :param end_index: index just past the last row to remove (optional; exclusive, like a slice bound)
        :param num_rows: number of rows to remove (optional)

        You should only provide one of end_index or num_rows, not both, else behavior is undefined.
        """
        # BUGFIX: compare against None so that end_index=0 or num_rows=0 are
        # honored (remove nothing) instead of silently meaning "to the end".
        if end_index is None:
            end_index = (start_index + num_rows) if num_rows is not None else len(self._data)
        self._data[start_index:end_index] = []

    def get_rows(self, start_index=0, end_index=None, num_rows=None,
                 in_columns=False, keyed=False):
        """
        Return a copy of the data from one or more rows in the DataSet

        See the comments on add_rows for an explanation of in_columns and keyed.

        You should only provide one of end_index or num_rows, not both, else behavior is undefined.

        :param start_index: index of first row (optional, default = 0)
        :param end_index: index just past the last row (optional; exclusive, like a slice bound)
        :param num_rows: number of rows to copy (optional)
        :param in_columns: whether data goes down each column first (True), or across each row first (False, default)
        :param keyed: whether to locate the columns via dict keys (True), or via ordered list index (False, default)
        """
        # BUGFIX: compare against None so that end_index=0 or num_rows=0 are
        # honored (return nothing) instead of silently meaning "to the end".
        if end_index is None:
            end_index = (start_index + num_rows) if num_rows is not None else len(self._data)
        if not in_columns:
            if keyed:
                return [dict(zip(self._column_names, row))
                        for row in self._data[start_index:end_index]]
            # NOTE(review): the outer list is new, but the row lists are the
            # DataSet's own (shallow copy) — mutating them mutates the DataSet.
            return self._data[start_index:end_index]
        if keyed:
            return {self._column_names[c]: [self._data[r][c]
                                            for r in range(start_index, end_index)]
                    for c in range(self._num_columns)}
        return [[self._data[r][c] for r in range(start_index, end_index)]
                for c in range(self._num_columns)]

    def to_sparkplug_dataset(self, sp_dataset, u32_in_long=False):
        """
        Copy the DataSet into a SparkplugB Payload.DataSet

        :param sp_dataset: SparkplugB Payload.DataSet to modify
        :param u32_in_long: whether to put UInt32 DataType in long_value or int_value (Default value = False)
        """
        sp_dataset.num_of_columns = self._num_columns
        sp_dataset.columns.extend(self._column_names)
        sp_dataset.types.extend(self._column_datatypes)
        for data_row in self._data:
            sp_row = sp_dataset.rows.add()
            for c in range(self._num_columns):
                dataset_value = sp_row.elements.add()
                value_to_sparkplug(dataset_value, self._column_datatypes[c],
                                   data_row[c], u32_in_long)
        return sp_dataset

    @classmethod
    def from_sparkplug_dataset(cls, sp_dataset):
        """
        Create a new DataSet object from a SparkplugB Payload.DataSet

        Returns a new DataSet object

        :param sp_dataset: SparkplugB Payload.DataSet to copy from
        :raises SparkplugDecodeError: if the column definitions are invalid
        """
        try:
            new_dataset = cls(dict(zip(sp_dataset.columns, sp_dataset.types)))
        except ValueError as errmsg:
            raise SparkplugDecodeError(errmsg)
        for sp_row in sp_dataset.rows:
            new_row = []
            for c in range(new_dataset._num_columns):
                value = value_from_sparkplug(sp_row.elements[c],
                                             new_dataset._column_datatypes[c])
                new_row.append(value)
            new_dataset._data.append(new_row)
        return new_dataset
def _rebirth_command_handler(tag, context, value):
    """
    Metric command handler for "Node Control/Rebirth"

    This does the well-known action in response to a Rebirth command, and causes the
    edge node's birth messages to be re-sent.

    :param tag: Metric object that this command was received on
    :param context: the optional cmd_context object provided to the Metric when it was created
    :param value: the new value received over Sparkplug (ignored; any write is a trigger)
    """
    # TODO - Add support for rebirth requests on subdevices
    tag._logger.info('Rebirth command received')
    assert(isinstance(tag._parent_device, Node))
    # We don't care what value the server wrote to the tag, any write is considered a trigger.
    tag._parent_device._needs_to_send_birth = True


def _next_server_command_handler(tag, context, value):
    """
    Metric command handler for "Node Control/Next Server"

    This does the well-known action in response to a Next Server command, and causes the
    edge node to disconnect from the MQTT broker and immediately reconnect to the next known
    server. This will be the same server if only one is configured.

    :param tag: Metric object that this command was received on
    :param context: the optional cmd_context object provided to the Metric when it was created
    :param value: the new value received over Sparkplug (ignored; any write is a trigger)
    """
    tag._logger.info('Next Server command received')
    assert(isinstance(tag._parent_device, Node))
    # We don't care what value the server wrote to the tag, any write is considered a trigger.
    node = tag._parent_device
    node._mqtt_param_index = (node._mqtt_param_index + 1) % len(node._mqtt_params)
    node._reconnect_client = True


class Metric(object):
    """
    The Metric object manages all aspects of a single metric

    change_value is used to report new values over Sparkplug, and the cmd_handler
    provided when created will be called if a new value is received from Sparkplug.
    """
    def __init__(self, parent_device, name, datatype=None, value=None,
                 cmd_handler=None, cmd_context=None):
        """
        Initialize a Metric object

        When the object is created, the parent_device and name must be provided,
        as well as either a datatype or initial value.

        :param parent_device: the Node or Device object this metric will be attached to
        :param name: the name of this metric within the node or device (must be unique within this device)
        :param datatype: optional tahu.DataType for this metric. if not specified, will be auto-detected from the initial value
        :param value: optional initial value for this metric. if not specified, the datatype parameter is required to specify the type explicitly
        :param cmd_handler: optional handler callback which will be triggered when a NCMD/DCMD message is received for this metric
        :param cmd_context: optional context to pass to the cmd_handler if desired
        :raises ValueError: if neither datatype nor value is given, or the datatype cannot be derived
        """
        # TODO - Protect the name/alias from being changed after creation
        if datatype is None and value is None:
            raise ValueError('Unable to define metric without explicit datatype or initial value')
        self._parent_device = parent_device
        self._logger = parent_device._logger
        self._u32_in_long = parent_device._u32_in_long
        self.name = str(name)
        if datatype:
            self._datatype = tahu.DataType(datatype)
        else:
            self._datatype = tahu._get_datatype_from_type(type(value))
            if self._datatype is None:
                raise ValueError('Need explicit datatype for Python type {}'.format(type(value)))
        self._value = value
        self._last_received = None
        self._last_sent = None
        self._cmd_handler = cmd_handler
        self._cmd_context = cmd_context
        self._properties = []
        # The alias assigned by the parent doubles as our index in its tag list.
        self.alias = parent_device._attach_tag(self)

    def _attach_property(self, property):
        """
        Attach a Sparkplug property object to this metric

        This method is normally not called directly, but instead by the init
        functions of the property object.

        Returns the index assigned to the property.

        :param property: the property object to attach
        """
        next_index = len(self._properties)
        self._properties.append(property)
        # TODO - Add checking/handling depending if we're connected
        return next_index

    def _fill_in_payload_metric(self, new_metric, birth=False):
        """
        Fill in the Metric message object provided with the metric's most recent values

        :param new_metric: a Sparkplug Payload.Metric message object to fill in
        :param birth: whether this is part of a birth payload and should include all
            properties. if False, only properties flagged report_with_data are
            included (Default value = False)
        """
        # BUGFIX: alias and datatype must be present on every message (DATA
        # included) so the host can identify and decode the metric; only the
        # name is restricted to BIRTH payloads.
        new_metric.alias = self.alias
        new_metric.datatype = self._datatype
        if birth:
            new_metric.name = self.name
        # Add properties
        for p in self._properties:
            # This chunk could arguably be a method of the property, but it
            # makes more sense here because of the way the PropertySet
            # protobuf object works...
            if birth or p._report_with_data:
                new_metric.properties.keys.append(p._name)
                pvalue = new_metric.properties.values.add()
                pvalue.type = p._datatype
                tahu.value_to_sparkplug(pvalue, pvalue.type, p._value,
                                        self._u32_in_long)
                p._last_sent = p._value
        # Add the current value or set is_null if None
        if self._value is None:
            new_metric.is_null = True
        else:
            tahu.value_to_sparkplug(new_metric, self._datatype, self._value,
                                    self._u32_in_long)
        self._last_sent = self._value

    def change_value(self, value, send_immediate=True):
        """
        Update the known value of the metric and optionally send a payload immediately

        Returns an alias number to use with send_data to trigger a sending of this metric manually

        :param value: the new value of the metric
        :param send_immediate: whether this change should trigger a payload containing
            this metric to be sent immediately. If true, other unchanged metrics will
            not be sent with this metric, and this metric will be sent even if the new
            value is identical to the previous. (Default value = True)
        """
        self._value = value
        if send_immediate:
            self._parent_device.send_data([self.alias])
        return self.alias

    def _handle_sparkplug_command(self, rx_metric):
        """
        Handle a received metric write and call out to the cmd_handler hook as needed

        :param rx_metric: the Sparkplug Payload.Metric message received for this metric
            (parameter renamed from `Metric`, which shadowed the class name)
        """
        # Note that we enforce OUR expected datatype on the value as we pull it from the metric
        try:
            value = tahu.value_from_sparkplug(rx_metric, self._datatype)
        except tahu.SparkplugDecodeError as errmsg:
            self._logger.warning('Sparkplug decode error for tag {}: {}'.format(self.name,
                                                                                errmsg))
            return
        self._logger.debug('Command received for tag {} = {}'.format(self.name,
                                                                     value))
        if self._cmd_handler:
            self._cmd_handler(self, self._cmd_context, value)
        else:
            self._logger.info('Received command for tag {} with no handler. No action taken.'.format(self.name))
        self._last_received = value

    def changed_since_last_sent(self):
        """If the metric value or any of the dynamic properties have changed since the most recent publish, returns true"""
        for p in self._properties:
            if p._report_with_data and p.changed_since_last_sent():
                return True
        return (self._value != self._last_sent)
class MetricProperty(object):
    """
    The MetricProperty object manages all aspects of a single metric property
    """
    def __init__(self, parent_metric, name, datatype, value,
                 report_with_data=False):
        """
        Initialize a MetricProperty object and attach it to its parent metric

        :param parent_metric: the Metric object this property will be attached to
        :param name: the name of this property within the metric (must be unique within this metric)
        :param datatype: tahu.DataType for this property; if falsy, the datatype
            is auto-detected from the Python type of value
        :param value: initial value for this property
        :param report_with_data: whether this property should be included in every
            DATA publish, or only with BIRTH (Default value = False)
        :raises ValueError: if no datatype is given and none can be derived from value
        """
        self._parent_metric = parent_metric
        self._name = str(name)
        if datatype:
            self._datatype = tahu.DataType(datatype)
        else:
            self._datatype = tahu._get_datatype_from_type(type(value))
        if self._datatype is None:
            raise ValueError('Need explicit datatype for Python type {}'.format(type(value)))
        self._value = value
        self._report_with_data = bool(report_with_data)
        self._last_sent = None
        self._parent_metric._attach_property(self)

    def changed_since_last_sent(self):
        """If the property value has changed since the most recent publish, returns true"""
        return (self._value != self._last_sent)

    def change_value(self, value, send_immediate=False):
        """
        Update the value of the property and optionally send a payload immediately

        Returns an alias number to use with send_data to trigger a sending of the parent metric manually

        :param value: new property value
        :param send_immediate: whether this change should trigger a payload containing
            the parent metric to be sent immediately; only honored when this property
            is flagged report_with_data (Default value = False)
        """
        # TODO - Trigger rebirth if someone changes a property that is not report_with_data?
        self._value = value
        if self._report_with_data and send_immediate:
            self._parent_metric._parent_device.send_data([self._parent_metric.alias])
        return self._parent_metric.alias


def bulk_properties(parent_metric, property_dict):
    """
    Create multiple property objects and attach them all to the same metric quickly and easily

    This function is useful for creating many properties that will not change value
    dynamically with simple datatypes. The Sparkplug datatype of each property is
    autodetected based on the Python type of the corresponding value.

    Returns a list of MetricProperty objects

    :param parent_metric: the Metric object to attach the properties to
    :param property_dict: a dict mapping property names (keys) to values
    """
    return [MetricProperty(parent_metric, name, None, value, False)
            for name, value in property_dict.items()]
+ + :param tag: the metric object to attach + + """ + next_index = len(self._tags) + self._tags.append(tag) + if self.is_connected(): + self.send_death() + self._needs_to_send_birth = True + return next_index + + # TODO - Add another function to remove a tag + + def _get_payload(self, alias_list, birth): + """ + Create and return a Sparkplug Payload message for this device and the given metric aliases + + Do not call directly. Use the send_data or send_birth device methods instead. + + :param alias_list: list of aliases to include in payload (ignored if birth=True) + :param birth: bool to indicate if this payload is a birth. includes all metrics and all properties if so. + + """ + tx_payload = sparkplug_b_pb2.Payload() + tx_payload.timestamp = tahu.timestamp_to_sparkplug() + tx_payload.seq = self._get_next_seq() + if birth: + alias_list = range(len(self._tags)) + for m in alias_list: + new_metric = tx_payload.metrics.add() + self._tags[m]._fill_in_payload_metric(new_metric, birth=birth) + return tx_payload + + def _get_topic(self, cmd_type): + """ + Return the topic string to use for a command of the type given on this device object + + Not normally called directly. + + :param cmd_type: string indicating message type; usually one of 'BIRTH', 'DEATH', 'DATA', or 'CMD' + + """ + raise NotImplementedError('_get_topic not implemented on this class') + + def send_birth(self): + """ + Generate and send a birth message for this device. + + Will trigger births on parent device or subdevices as needed. + + Returns the result of calling the `publish` function of the MQTT client library (paho) + + """ + raise NotImplementedError('send_birth not implemented on this class') + + def send_death(self): + """ + Generate and send a death message for this device. + + Will flag device(s) to send birth on next update as needed. 
+ + Returns the result of calling the `publish` function of the MQTT client library (paho) + + """ + raise NotImplementedError('send_death not implemented on this class') + + def send_data(self, aliases=None, changed_only=False): + """ + Generate and send a data message for this device. + + Returns the result of calling the `publish` function of the MQTT client library (paho) + + :param aliases: list of metric aliases to include in this payload; if None will include all metrics (Default value = None) + :param changed_only: whether to filter the metrics to only include those that have changed since the prior publish (Default value = False) + + """ + if not self.is_connected(): + self._logger.warning('Trying to send data when not connected. Skipping.') + return + if self._needs_to_send_birth: + return self.send_birth() + if aliases is None: + aliases = range(len(self._tags)) + if changed_only: + aliases = [x for x in aliases if self._tags[x].changed_since_last_sent()] + if len(aliases) == 0: + return + + tx_payload = self._get_payload(aliases, False) + topic = self._get_topic('DATA') + return self._mqtt_client.publish(topic, + tx_payload.SerializeToString()) + + def get_watched_topic(self): + """Return the MQTT topic string on which this device expects to receive messages""" + return self._get_topic('CMD') + + def _handle_payload(self, topic, payload): + """ + Handle a received Sparkplug payload + + Returns true/false to indicate whether this device was the intended recipient and the message was handled + + :param topic: MQTT topic string the payload was received on + :param payload: a sparkplug_b_pb2.Payload object containing the decoded payload + + """ + # Check if topic is for this device + watched_topic = self.get_watched_topic() + if topic != watched_topic: + return False + local_names = self.get_tag_names() + for pm in payload.metrics: + if pm.HasField('alias'): + if pm.alias >= len(self._tags): + self._logger.warning('Invalid alias {} for this device. 
class Node(_AbstractBaseDevice):
    """
    An object to manage a Sparkplug edge node, including metrics and subdevices and MQTT client connections
    """
    def __init__(self, mqtt_params, group_id, edge_node_id,
                 provide_bdSeq=True, provide_controls=True, logger=None,
                 u32_in_long=False):
        """
        Initializer method for Node

        :param mqtt_params: list of one or more tahu.mqtt_params objects containing MQTT client configurations
        :param group_id: string to use as group ID in MQTT Sparkplug topics
        :param edge_node_id: string to use as edge node ID in MQTT Sparkplug topics
        :param provide_bdSeq: optional boolean to indicate if bdSeq metric should be created/used according to Sparkplug spec (Default value = True)
        :param provide_controls: optional boolean to indicate if well-known control metrics "Node Control/Rebirth" and "Node Control/Next Server" should be created/used according to Sparkplug reference implementations (Default value = True)
        :param logger: optional logging.Logger object to handle log messages from this device and all objects attached to it. if None then a new one is created. (Default value = None)
        :param u32_in_long: optional boolean to indicate if metrics/properties with datatype UInt32 should put their values in the long_value or int_value part of the payload. (Default value = False)
        """
        super().__init__()
        self._mqtt_params = list(mqtt_params)
        self._mqtt_param_index = 0
        self._u32_in_long = bool(u32_in_long)
        self._group_id = str(group_id)
        self._edge_node_id = str(edge_node_id)
        node_reference = '{}_{}'.format(self._group_id, self._edge_node_id)
        self._logger = logger if logger else logging.getLogger(node_reference)
        self._mqtt_logger = self._logger.getChild('mqtt')
        self._init_mqtt_client()
        self._sequence = 0
        self._subdevices = []
        # Index 0 is always this node's own CMD topic; subdevices append theirs.
        self._all_device_topics = [self.get_watched_topic()]
        self._thread = None
        self._thread_terminate = True
        self._reconnect_client = False

        if provide_bdSeq:
            # We use the timestamp as our bdSeq since we do not have a persistent counter
            new_tag = Metric(self, 'bdSeq', tahu.DataType.Int64,
                             value=tahu.timestamp_to_sparkplug())
            self._bdseq_alias = new_tag.alias
        else:
            self._bdseq_alias = None
        if provide_controls:
            # We do not support "Node Control/Reboot" since we can't reboot ourselves easily
            #Metric(self, 'Node Control/Reboot', tahu.DataType.Boolean, value=False)
            Metric(self, 'Node Control/Rebirth',
                   tahu.DataType.Boolean, value=False,
                   cmd_handler=_rebirth_command_handler)
            Metric(self, 'Node Control/Next Server',
                   tahu.DataType.Boolean, value=False,
                   cmd_handler=_next_server_command_handler)

    def _get_next_seq(self):
        """Returns the Sparkplug `seq` number to use on the next publish (wraps at 256)"""
        seq_to_use = self._sequence
        self._sequence = (self._sequence + 1) % 256
        return seq_to_use

    def send_birth(self):
        """
        Generate and send a birth message for this device.

        Will trigger births on subdevices as needed.

        Returns the result of calling the `publish` function of the MQTT client library (paho)
        """
        if not self.is_connected():
            self._logger.warning('Trying to send birth when not connected. Skipping.')
            return
        # Per the Sparkplug spec, the seq number restarts at 0 with every NBIRTH.
        self._sequence = 0
        tx_payload = self._get_payload(None, True)
        topic = self._get_topic('BIRTH')
        pub_result = self._mqtt_client.publish(topic,
                                               tx_payload.SerializeToString())
        if pub_result.rc != 0:
            return pub_result
        self._needs_to_send_birth = False
        # Subdevice births must always follow a fresh node birth.
        for d in self._subdevices:
            d._needs_to_send_birth = True
        return pub_result

    def _get_death_payload(self, will):
        """
        Create and return a Sparkplug Payload DEATH message for this device

        :param will: boolean to indicate if this message will be used as the MQTT LWT. if True, bdSeq will be updated if available.
        """
        if self._bdseq_alias is not None:
            if will:
                # We use the timestamp as our bdSeq since we do not have a persistent counter
                new_bdseq = tahu.timestamp_to_sparkplug()
                self._logger.debug('Generating new WILL bdSeq={}'.format(new_bdseq))
                self._tags[self._bdseq_alias].change_value(new_bdseq,
                                                           send_immediate=False)
            death_payload = self._get_payload([self._bdseq_alias], False)
            # This timestamp would be wrong when finally sent, so we just remove it
            death_payload.ClearField('timestamp')
            # To be safe, add the name to the bdSeq metric and don't use the alias
            death_payload.metrics[0].name = 'bdSeq'
            death_payload.metrics[0].ClearField('alias')
        else:
            death_payload = self._get_payload([], False)
        return death_payload

    def _get_will_topic_and_payload(self):
        """Returns a tuple of the MQTT topic and payload bytes to use as the LWT for this edge node device"""
        tx_payload = self._get_death_payload(will=True)
        topic = self._get_topic('DEATH')
        return topic, tx_payload.SerializeToString()

    def send_death(self):
        """
        Generate and send a death message for this device.

        Will flag device(s) to send birth on next update as needed.

        Returns the result of calling the `publish` function of the MQTT client library (paho)
        """
        if not self.is_connected():
            self._logger.warning('Trying to send death when not connected. Skipping.')
            return
        tx_payload = self._get_death_payload(will=False)
        topic = self._get_topic('DEATH')
        pub_result = self._mqtt_client.publish(topic,
                                               tx_payload.SerializeToString())
        # Even if this publish didn't succeed, it's safer to rebirth unnecessarily...
        self._needs_to_send_birth = True
        for d in self._subdevices:
            # BUGFIX: was `d.needs_to_birth = True`, which set a nonexistent
            # attribute and left subdevices unflagged for rebirth.
            d._needs_to_send_birth = True
        return pub_result

    def _attach_subdevice(self, subdevice):
        """
        Attach a newly created subdevice to this parent device

        Returns the new subdevice's handle from the parent device context.

        :param subdevice: newly created Device object to attach
        """
        next_index = len(self._subdevices)
        self._subdevices.append(subdevice)
        self._all_device_topics.append(subdevice.get_watched_topic())
        # Adding a subdevice changes the birth certificate; if we are already
        # online, end the session so a fresh birth can announce the subdevice.
        if self.is_connected():
            self.send_death()
        self._needs_to_send_birth = True
        return next_index

    # TODO - Add another function to remove a subdevice

    def _get_topic(self, cmd_type):
        """
        Return the topic string to use for a command of the type given on this device object

        Not normally called directly.

        :param cmd_type: string indicating message type; usually one of 'BIRTH', 'DEATH', 'DATA', or 'CMD'
        """
        # Produces e.g. spBv1.0/<group>/NBIRTH/<edge_node_id>
        return 'spBv1.0/{}/N{}/{}'.format(self._group_id, cmd_type,
                                          self._edge_node_id)

    def _mqtt_subscribe(self):
        """
        Activate MQTT subscriptions for the Node and all Device sub-devices

        This builds the proper topic lists and triggers the subscription command to the MQTT broker.

        Returns the result of calling Paho `subscribe` API command
        """
        # TODO - Add support for 'STATE/#' monitoring and holdoff?
        # Subscribe to all topics for commands related to this device...
        ncmd_subscription = 'spBv1.0/{}/NCMD/{}/#'.format(self._group_id,
                                                          self._edge_node_id)
        dcmd_subscription = 'spBv1.0/{}/DCMD/{}/#'.format(self._group_id,
                                                          self._edge_node_id)
        desired_qos = 0
        subscriptions = [(ncmd_subscription, desired_qos),
                         (dcmd_subscription, desired_qos)]
        return self._mqtt_client.subscribe(subscriptions)

    def _mqtt_on_connect(self, client, userdata, flags, rc):
        """Callback handler for Paho (MQTT) on_connect events"""
        if rc != 0:
            self._logger.warning('MQTT connect error rc={}'.format(rc))
            return
        self._is_connected = True
        # A fresh connection implies we have no subscriptions and need to birth
        self._is_subscribed = False
        self._needs_to_send_birth = True
        for d in self._subdevices:
            d._needs_to_send_birth = True
        self._mqtt_subscribe()

    def _mqtt_on_disconnect(self, client, userdata, rc):
        """Callback handler for Paho (MQTT) on_disconnect events"""
        self._logger.warning('MQTT disconnect rc={}'.format(rc))
        self._is_connected = False
        # The thread loop will try reconnecting for us, we just need to setup a new will first
        will_topic, will_payload = self._get_will_topic_and_payload()
        client.will_set(will_topic, will_payload)

    def _mqtt_on_message(self, client, userdata, message):
        """Callback handler for Paho (MQTT) on_message events"""
        if message.topic in self._all_device_topics:
            rx_payload = sparkplug_b_pb2.Payload()
            rx_payload.ParseFromString(message.payload)
            handler_index = self._all_device_topics.index(message.topic)
            # Index 0 is this node itself; higher indexes map to subdevices.
            if handler_index == 0:
                self._handle_payload(message.topic, rx_payload)
            else:
                self._subdevices[handler_index - 1]._handle_payload(message.topic,
                                                                    rx_payload)
        else:
            self._logger.info('Ignoring MQTT message on topic {}'.format(message.topic))

    def _mqtt_on_subscribe(self, client, userdata, mid, granted_qos):
        """Callback handler for Paho (MQTT) on_subscribe events"""
        # TODO - Confirm the mid matches our subscription request before assuming we're good to go?
        self._is_subscribed = True

    def _init_mqtt_client(self, reinit=False):
        """Used to initialize MQTT client from nothing or with reinit=True to forcefully abort a connection and trigger NDEATH LWT payload from MQTT broker"""
        curr_params = self._mqtt_params[self._mqtt_param_index]
        if curr_params['client_id']:
            self._client_id = curr_params['client_id']
        else:
            self._client_id = '{}_{}_{}'.format(self._group_id,
                                                self._edge_node_id,
                                                os.getpid())
        self._logger.info('Initializing MQTT client (client_id={} reinit={})'.format(self._client_id,
                                                                                     reinit))
        if reinit:
            self._mqtt_client.reinitialise(client_id=self._client_id)
        else:
            self._mqtt_client = mqtt.Client(client_id=self._client_id)
        self._mqtt_client.enable_logger(self._mqtt_logger)
        self._mqtt_client.on_connect = self._mqtt_on_connect
        self._mqtt_client.on_disconnect = self._mqtt_on_disconnect
        self._mqtt_client.on_message = self._mqtt_on_message
        self._mqtt_client.on_subscribe = self._mqtt_on_subscribe
        self._is_connected = False
        self._is_subscribed = False

    def _prep_client_connection(self):
        """Used to configure and start a MQTT client connection"""
        if self._is_connected:
            self._logger.error('Attempting to start MQTT connection while already connected. Skipping.')
            return
        curr_params = self._mqtt_params[self._mqtt_param_index]
        if curr_params['username']:
            self._mqtt_client.username_pw_set(curr_params['username'],
                                              curr_params['password'])
        tls_mismatch = ((curr_params['port'] == 1883 and curr_params['tls_enabled'])
                        or (curr_params['port'] == 8883 and not curr_params['tls_enabled']))
        if tls_mismatch:
            self._logger.warning('Setting up MQTT params on well-known port with unexpected TLS setting. Are you sure you meant to do this?')
        if curr_params['tls_enabled']:
            self._mqtt_client.tls_set(ca_certs=curr_params['ca_certs'],
                                      certfile=curr_params['certfile'],
                                      keyfile=curr_params['keyfile'])
        # The will must be in place before connecting so the broker can
        # publish our NDEATH if the session drops unexpectedly.
        will_topic, will_payload = self._get_will_topic_and_payload()
        self._mqtt_client.will_set(will_topic, will_payload)
        self._logger.info('Starting MQTT client connection to host={}'.format(curr_params['server']))
        self._mqtt_client.connect(host=curr_params['server'],
                                  port=curr_params['port'],
                                  keepalive=curr_params['keepalive'])

    def _thread_main(self):
        """
        Thread worker loop that coordinates all MQTT recv operations and application interactions

        It maintains MQTT broker connections and handles birth/death of devices and sub-devices as needed.
        """
        # TODO - Add support to timeout bad/failed connections to trigger _reconnect_client
        self._logger.info('MQTT thread started...')
        self._prep_client_connection()
        while not self._thread_terminate:
            self._mqtt_client.loop()
            if self._reconnect_client:
                self._reconnect_client = False
                self._init_mqtt_client(reinit=True)
                self._prep_client_connection()
            elif self.is_connected():
                if self._needs_to_send_birth:
                    self.send_birth()
                else:
                    # Only try to send subdevice births if the top-level device doesn't need it
                    for d in self._subdevices:
                        if d._needs_to_send_birth:
                            d.send_birth()
        # Use the reinit as a trick to force the sockets closed
        self._init_mqtt_client(reinit=True)
        self._logger.info('MQTT thread stopped...')

    def online(self):
        """
        Request Node go online if not already

        Starts a new worker thread if needed.
        """
        if self._thread is not None:
            self._logger.warning('MQTT thread already running!')
            return
        self._thread_terminate = False
        self._thread = threading.Thread(target=self._thread_main)
        self._thread.daemon = True
        self._thread.start()

    def offline(self):
        """
        Request Node go offline if not already

        Blocks until worker thread is stopped if not run from that thread
        """
        self._logger.info('Requesting MQTT thread stop...')
        self._thread_terminate = True
        if self._thread is None:
            self._logger.warning('MQTT thread not running!')
        elif threading.current_thread() != self._thread:
            self._thread.join()
        self._thread = None

    def is_connected(self):
        """Returns True if Node is properly connected to a MQTT broker"""
        return self._is_connected and self._is_subscribed
+ if self._parent_device._needs_to_send_birth: + return self._parent_device.send_birth() + tx_payload = self._get_payload(None, True) + topic = self._get_topic('BIRTH') + pub_result = self._mqtt_client.publish(topic, + tx_payload.SerializeToString()) + if pub_result.rc == 0: + self._needs_to_send_birth = False + return pub_result + + def send_death(self): + """ + Generate and send a death message for this device. + + Will flag device(s) to send birth on next update as needed. + + Returns the result of calling the `publish` function of the MQTT client library (paho) + + """ + if not self.is_connected(): + self._logger.warning('Trying to send death when not connected. Skipping.') + return + tx_payload = self._get_payload([], False) + topic = self._get_topic('DEATH') + pub_result = self._mqtt_client.publish(topic, + tx_payload.SerializeToString()) + # Even if this publish didn't succeed, it's safer to rebirth unnecessarily... + self._needs_to_send_birth = True + return pub_result + + def _get_topic(self, cmd_type): + """ + Return the topic string to use for a command of the type given on this device object + + Not normally called directly. 
+ + :param cmd_type: string indicating message type; usually one of 'BIRTH', 'DEATH', 'DATA', or 'CMD' + + """ + return 'spBv1.0/{}/D{}/{}/{}'.format(self._parent_device._group_id, + cmd_type, + self._parent_device._edge_node_id, + self.name) + + def is_connected(self): + """Returns True if the parent Node is properly connected to a MQTT broker""" + return self._parent_device.is_connected() + + +class IgnitionQualityCode(enum.IntEnum): + """A list of values for the quality property that are understood by Ignition""" + Bad = -2147483136 + Bad_AccessDenied = -2147483134 + Bad_AggregateNotFound = -2147483127 + Bad_DatabaseNotConnected = -2147483123 + Bad_Disabled = -2147483133 + Bad_Failure = -2147483121 + Bad_GatewayCommOff = -2147483125 + Bad_LicenseExceeded = -2147483130 + Bad_NotConnected = -2147483126 + Bad_NotFound = -2147483129 + Bad_OutOfRange = -2147483124 + Bad_ReadOnly = -2147483122 + Bad_ReferenceNotFound = -2147483128 + Bad_Stale = -2147483132 + Bad_TrialExpired = -2147483131 + Bad_Unauthorized = -2147483135 + Bad_Unsupported = -2147483120 + Error = -1073741056 + Error_Configuration = -1073741055 + Error_CycleDetected = -1073741044 + Error_DatabaseQuery = -1073741051 + Error_Exception = -1073741048 + Error_ExpressionEval = -1073741054 + Error_Formatting = -1073741046 + Error_IO = -1073741050 + Error_InvalidPathSyntax = -1073741047 + Error_ScriptEval = -1073741045 + Error_TagExecution = -1073741053 + Error_TimeoutExpired = -1073741049 + Error_TypeConversion = -1073741052 + Good = 192 + Good_Initial = 201 + Good_Provisional = 200 + Good_Unspecified = 0 + Good_WritePending = 2 + Uncertain = 1073742080 + Uncertain_DataSubNormal = 1073742083 + Uncertain_EngineeringUnitsExceeded = 1073742084 + Uncertain_IncompleteOperation = 1073742085 + Uncertain_InitialValue = 1073742082 + Uncertain_LastKnownValue = 1073742081 + + +def ignition_quality_property(parent_metric, value=IgnitionQualityCode.Good): + """ + Create a dynamic tag quality property that will be 
understood by Ignition + + :param parent_metric: the Metric object to attach the property to + :param value: the current ignition.QualityCode value (Default value = ignition.QualityCode.Good) + + """ + return MetricProperty(parent_metric, 'Quality', + tahu.DataType.Int32, value, + True) + +def ignition_low_property(parent_metric, value): + """ + Create a tag low-range (engLow) property that will be understood by Ignition + + Uses the same Sparkplug datatype on the property as the parent metric + + :param parent_metric: the Metric object to attach the property to + :param value: the low range of the metric + + """ + return MetricProperty(parent_metric, 'engLow', + parent_metric._datatype, + value, False) + +def ignition_high_property(parent_metric, value): + """ + Create a tag high-range (engHigh) property that will be understood by Ignition + + Uses the same Sparkplug datatype on the property as the parent metric + + :param parent_metric: the Metric object to attach the property to + :param value: the high range of the metric + + """ + return MetricProperty(parent_metric, 'engHigh', + parent_metric._datatype, + value, False) + +def ignition_unit_property(parent_metric, value): + """ + Create a tag units (engUnit) property that will be understood by Ignition + + :param parent_metric: the Metric object to attach the property to + :param value: the units of the metric + + """ + return MetricProperty(parent_metric, 'engUnit', + tahu.DataType.String, value, + False) + +def ignition_documentation_property(parent_metric, value): + """ + Create a tag documentation property that will be understood by Ignition + + :param parent_metric: the Metric object to attach the property to + :param value: the documentation of the metric + + """ + return MetricProperty(parent_metric, 'Documentation', + tahu.DataType.String, value, + False) + diff --git a/python/core/sparkplug_b_pb2.py b/python/core/tahu/sparkplug_b_pb2.py similarity index 83% rename from python/core/sparkplug_b_pb2.py 
rename to python/core/tahu/sparkplug_b_pb2.py index a245846f..4ed53ef0 100644 --- a/python/core/sparkplug_b_pb2.py +++ b/python/core/tahu/sparkplug_b_pb2.py @@ -3,11 +3,11 @@ import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -19,10 +19,200 @@ name='sparkplug_b.proto', package='org.eclipse.tahu.protobuf', syntax='proto2', - serialized_pb=_b('\n\x11sparkplug_b.proto\x12\x19org.eclipse.tahu.protobuf\"\xee\x15\n\x07Payload\x12\x11\n\ttimestamp\x18\x01 \x01(\x04\x12:\n\x07metrics\x18\x02 \x03(\x0b\x32).org.eclipse.tahu.protobuf.Payload.Metric\x12\x0b\n\x03seq\x18\x03 \x01(\x04\x12\x0c\n\x04uuid\x18\x04 \x01(\t\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a\xa6\x04\n\x08Template\x12\x0f\n\x07version\x18\x01 \x01(\t\x12:\n\x07metrics\x18\x02 \x03(\x0b\x32).org.eclipse.tahu.protobuf.Payload.Metric\x12I\n\nparameters\x18\x03 \x03(\x0b\x32\x35.org.eclipse.tahu.protobuf.Payload.Template.Parameter\x12\x14\n\x0ctemplate_ref\x18\x04 \x01(\t\x12\x15\n\ris_definition\x18\x05 \x01(\x08\x1a\xca\x02\n\tParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\r\x12\x13\n\tint_value\x18\x03 \x01(\rH\x00\x12\x14\n\nlong_value\x18\x04 \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x05 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x06 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x07 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x08 \x01(\tH\x00\x12h\n\x0f\x65xtension_value\x18\t 
\x01(\x0b\x32M.org.eclipse.tahu.protobuf.Payload.Template.Parameter.ParameterValueExtensionH\x00\x1a#\n\x17ParameterValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value*\x08\x08\x06\x10\x80\x80\x80\x80\x02\x1a\x97\x04\n\x07\x44\x61taSet\x12\x16\n\x0enum_of_columns\x18\x01 \x01(\x04\x12\x0f\n\x07\x63olumns\x18\x02 \x03(\t\x12\r\n\x05types\x18\x03 \x03(\r\x12<\n\x04rows\x18\x04 \x03(\x0b\x32..org.eclipse.tahu.protobuf.Payload.DataSet.Row\x1a\xaf\x02\n\x0c\x44\x61taSetValue\x12\x13\n\tint_value\x18\x01 \x01(\rH\x00\x12\x14\n\nlong_value\x18\x02 \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x03 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x05 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x06 \x01(\tH\x00\x12h\n\x0f\x65xtension_value\x18\x07 \x01(\x0b\x32M.org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.DataSetValueExtensionH\x00\x1a!\n\x15\x44\x61taSetValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value\x1aZ\n\x03Row\x12I\n\x08\x65lements\x18\x01 \x03(\x0b\x32\x37.org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue*\x08\x08\x02\x10\x80\x80\x80\x80\x02*\x08\x08\x05\x10\x80\x80\x80\x80\x02\x1a\xe9\x03\n\rPropertyValue\x12\x0c\n\x04type\x18\x01 \x01(\r\x12\x0f\n\x07is_null\x18\x02 \x01(\x08\x12\x13\n\tint_value\x18\x03 \x01(\rH\x00\x12\x14\n\nlong_value\x18\x04 \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x05 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x06 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x07 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x08 \x01(\tH\x00\x12K\n\x11propertyset_value\x18\t \x01(\x0b\x32..org.eclipse.tahu.protobuf.Payload.PropertySetH\x00\x12P\n\x12propertysets_value\x18\n \x01(\x0b\x32\x32.org.eclipse.tahu.protobuf.Payload.PropertySetListH\x00\x12\x62\n\x0f\x65xtension_value\x18\x0b 
\x01(\x0b\x32G.org.eclipse.tahu.protobuf.Payload.PropertyValue.PropertyValueExtensionH\x00\x1a\"\n\x16PropertyValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value\x1ag\n\x0bPropertySet\x12\x0c\n\x04keys\x18\x01 \x03(\t\x12@\n\x06values\x18\x02 \x03(\x0b\x32\x30.org.eclipse.tahu.protobuf.Payload.PropertyValue*\x08\x08\x03\x10\x80\x80\x80\x80\x02\x1a`\n\x0fPropertySetList\x12\x43\n\x0bpropertyset\x18\x01 \x03(\x0b\x32..org.eclipse.tahu.protobuf.Payload.PropertySet*\x08\x08\x02\x10\x80\x80\x80\x80\x02\x1a\xa4\x01\n\x08MetaData\x12\x15\n\ris_multi_part\x18\x01 \x01(\x08\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\t\x12\x0c\n\x04size\x18\x03 \x01(\x04\x12\x0b\n\x03seq\x18\x04 \x01(\x04\x12\x11\n\tfile_name\x18\x05 \x01(\t\x12\x11\n\tfile_type\x18\x06 \x01(\t\x12\x0b\n\x03md5\x18\x07 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x08 \x01(\t*\x08\x08\t\x10\x80\x80\x80\x80\x02\x1a\xbf\x05\n\x06Metric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05\x61lias\x18\x02 \x01(\x04\x12\x11\n\ttimestamp\x18\x03 \x01(\x04\x12\x10\n\x08\x64\x61tatype\x18\x04 \x01(\r\x12\x15\n\ris_historical\x18\x05 \x01(\x08\x12\x14\n\x0cis_transient\x18\x06 \x01(\x08\x12\x0f\n\x07is_null\x18\x07 \x01(\x08\x12=\n\x08metadata\x18\x08 \x01(\x0b\x32+.org.eclipse.tahu.protobuf.Payload.MetaData\x12\x42\n\nproperties\x18\t \x01(\x0b\x32..org.eclipse.tahu.protobuf.Payload.PropertySet\x12\x13\n\tint_value\x18\n \x01(\rH\x00\x12\x14\n\nlong_value\x18\x0b \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x0c \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\r \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x0e \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x0f \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x10 \x01(\x0cH\x00\x12\x43\n\rdataset_value\x18\x11 \x01(\x0b\x32*.org.eclipse.tahu.protobuf.Payload.DataSetH\x00\x12\x45\n\x0etemplate_value\x18\x12 \x01(\x0b\x32+.org.eclipse.tahu.protobuf.Payload.TemplateH\x00\x12Y\n\x0f\x65xtension_value\x18\x13 
\x01(\x0b\x32>.org.eclipse.tahu.protobuf.Payload.Metric.MetricValueExtensionH\x00\x1a \n\x14MetricValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value*\x08\x08\x06\x10\x80\x80\x80\x80\x02\x42,\n\x19org.eclipse.tahu.protobufB\x0fSparkplugBProto') + serialized_options=_b('\n\031org.eclipse.tahu.protobufB\017SparkplugBProto'), + serialized_pb=_b('\n\x11sparkplug_b.proto\x12\x19org.eclipse.tahu.protobuf\"\xee\x15\n\x07Payload\x12\x11\n\ttimestamp\x18\x01 \x01(\x04\x12:\n\x07metrics\x18\x02 \x03(\x0b\x32).org.eclipse.tahu.protobuf.Payload.Metric\x12\x0b\n\x03seq\x18\x03 \x01(\x04\x12\x0c\n\x04uuid\x18\x04 \x01(\t\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a\xa6\x04\n\x08Template\x12\x0f\n\x07version\x18\x01 \x01(\t\x12:\n\x07metrics\x18\x02 \x03(\x0b\x32).org.eclipse.tahu.protobuf.Payload.Metric\x12I\n\nparameters\x18\x03 \x03(\x0b\x32\x35.org.eclipse.tahu.protobuf.Payload.Template.Parameter\x12\x14\n\x0ctemplate_ref\x18\x04 \x01(\t\x12\x15\n\ris_definition\x18\x05 \x01(\x08\x1a\xca\x02\n\tParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\r\x12\x13\n\tint_value\x18\x03 \x01(\rH\x00\x12\x14\n\nlong_value\x18\x04 \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x05 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x06 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x07 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x08 \x01(\tH\x00\x12h\n\x0f\x65xtension_value\x18\t \x01(\x0b\x32M.org.eclipse.tahu.protobuf.Payload.Template.Parameter.ParameterValueExtensionH\x00\x1a#\n\x17ParameterValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value*\x08\x08\x06\x10\x80\x80\x80\x80\x02\x1a\x97\x04\n\x07\x44\x61taSet\x12\x16\n\x0enum_of_columns\x18\x01 \x01(\x04\x12\x0f\n\x07\x63olumns\x18\x02 \x03(\t\x12\r\n\x05types\x18\x03 \x03(\r\x12<\n\x04rows\x18\x04 \x03(\x0b\x32..org.eclipse.tahu.protobuf.Payload.DataSet.Row\x1a\xaf\x02\n\x0c\x44\x61taSetValue\x12\x13\n\tint_value\x18\x01 \x01(\rH\x00\x12\x14\n\nlong_value\x18\x02 
\x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x03 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x05 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x06 \x01(\tH\x00\x12h\n\x0f\x65xtension_value\x18\x07 \x01(\x0b\x32M.org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.DataSetValueExtensionH\x00\x1a!\n\x15\x44\x61taSetValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value\x1aZ\n\x03Row\x12I\n\x08\x65lements\x18\x01 \x03(\x0b\x32\x37.org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue*\x08\x08\x02\x10\x80\x80\x80\x80\x02*\x08\x08\x05\x10\x80\x80\x80\x80\x02\x1a\xe9\x03\n\rPropertyValue\x12\x0c\n\x04type\x18\x01 \x01(\r\x12\x0f\n\x07is_null\x18\x02 \x01(\x08\x12\x13\n\tint_value\x18\x03 \x01(\rH\x00\x12\x14\n\nlong_value\x18\x04 \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x05 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x06 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x07 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x08 \x01(\tH\x00\x12K\n\x11propertyset_value\x18\t \x01(\x0b\x32..org.eclipse.tahu.protobuf.Payload.PropertySetH\x00\x12P\n\x12propertysets_value\x18\n \x01(\x0b\x32\x32.org.eclipse.tahu.protobuf.Payload.PropertySetListH\x00\x12\x62\n\x0f\x65xtension_value\x18\x0b \x01(\x0b\x32G.org.eclipse.tahu.protobuf.Payload.PropertyValue.PropertyValueExtensionH\x00\x1a\"\n\x16PropertyValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value\x1ag\n\x0bPropertySet\x12\x0c\n\x04keys\x18\x01 \x03(\t\x12@\n\x06values\x18\x02 \x03(\x0b\x32\x30.org.eclipse.tahu.protobuf.Payload.PropertyValue*\x08\x08\x03\x10\x80\x80\x80\x80\x02\x1a`\n\x0fPropertySetList\x12\x43\n\x0bpropertyset\x18\x01 \x03(\x0b\x32..org.eclipse.tahu.protobuf.Payload.PropertySet*\x08\x08\x02\x10\x80\x80\x80\x80\x02\x1a\xa4\x01\n\x08MetaData\x12\x15\n\ris_multi_part\x18\x01 \x01(\x08\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\t\x12\x0c\n\x04size\x18\x03 \x01(\x04\x12\x0b\n\x03seq\x18\x04 
\x01(\x04\x12\x11\n\tfile_name\x18\x05 \x01(\t\x12\x11\n\tfile_type\x18\x06 \x01(\t\x12\x0b\n\x03md5\x18\x07 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x08 \x01(\t*\x08\x08\t\x10\x80\x80\x80\x80\x02\x1a\xbf\x05\n\x06Metric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05\x61lias\x18\x02 \x01(\x04\x12\x11\n\ttimestamp\x18\x03 \x01(\x04\x12\x10\n\x08\x64\x61tatype\x18\x04 \x01(\r\x12\x15\n\ris_historical\x18\x05 \x01(\x08\x12\x14\n\x0cis_transient\x18\x06 \x01(\x08\x12\x0f\n\x07is_null\x18\x07 \x01(\x08\x12=\n\x08metadata\x18\x08 \x01(\x0b\x32+.org.eclipse.tahu.protobuf.Payload.MetaData\x12\x42\n\nproperties\x18\t \x01(\x0b\x32..org.eclipse.tahu.protobuf.Payload.PropertySet\x12\x13\n\tint_value\x18\n \x01(\rH\x00\x12\x14\n\nlong_value\x18\x0b \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x0c \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\r \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x0e \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x0f \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x10 \x01(\x0cH\x00\x12\x43\n\rdataset_value\x18\x11 \x01(\x0b\x32*.org.eclipse.tahu.protobuf.Payload.DataSetH\x00\x12\x45\n\x0etemplate_value\x18\x12 \x01(\x0b\x32+.org.eclipse.tahu.protobuf.Payload.TemplateH\x00\x12Y\n\x0f\x65xtension_value\x18\x13 \x01(\x0b\x32>.org.eclipse.tahu.protobuf.Payload.Metric.MetricValueExtensionH\x00\x1a 
\n\x14MetricValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value*\x08\x08\x06\x10\x80\x80\x80\x80\x02*\xf2\x03\n\x08\x44\x61taType\x12\x0b\n\x07Unknown\x10\x00\x12\x08\n\x04Int8\x10\x01\x12\t\n\x05Int16\x10\x02\x12\t\n\x05Int32\x10\x03\x12\t\n\x05Int64\x10\x04\x12\t\n\x05UInt8\x10\x05\x12\n\n\x06UInt16\x10\x06\x12\n\n\x06UInt32\x10\x07\x12\n\n\x06UInt64\x10\x08\x12\t\n\x05\x46loat\x10\t\x12\n\n\x06\x44ouble\x10\n\x12\x0b\n\x07\x42oolean\x10\x0b\x12\n\n\x06String\x10\x0c\x12\x0c\n\x08\x44\x61teTime\x10\r\x12\x08\n\x04Text\x10\x0e\x12\x08\n\x04UUID\x10\x0f\x12\x0b\n\x07\x44\x61taSet\x10\x10\x12\t\n\x05\x42ytes\x10\x11\x12\x08\n\x04\x46ile\x10\x12\x12\x0c\n\x08Template\x10\x13\x12\x0f\n\x0bPropertySet\x10\x14\x12\x13\n\x0fPropertySetList\x10\x15\x12\r\n\tInt8Array\x10\x16\x12\x0e\n\nInt16Array\x10\x17\x12\x0e\n\nInt32Array\x10\x18\x12\x0e\n\nInt64Array\x10\x19\x12\x0e\n\nUInt8Array\x10\x1a\x12\x0f\n\x0bUInt16Array\x10\x1b\x12\x0f\n\x0bUInt32Array\x10\x1c\x12\x0f\n\x0bUInt64Array\x10\x1d\x12\x0e\n\nFloatArray\x10\x1e\x12\x0f\n\x0b\x44oubleArray\x10\x1f\x12\x10\n\x0c\x42ooleanArray\x10 \x12\x0f\n\x0bStringArray\x10!\x12\x11\n\rDateTimeArray\x10\"B,\n\x19org.eclipse.tahu.protobufB\x0fSparkplugBProto') ) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) +_DATATYPE = _descriptor.EnumDescriptor( + name='DataType', + full_name='org.eclipse.tahu.protobuf.DataType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='Unknown', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Int8', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Int16', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Int32', index=3, number=3, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Int64', index=4, number=4, + serialized_options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='UInt8', index=5, number=5, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UInt16', index=6, number=6, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UInt32', index=7, number=7, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UInt64', index=8, number=8, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Float', index=9, number=9, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Double', index=10, number=10, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Boolean', index=11, number=11, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='String', index=12, number=12, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DateTime', index=13, number=13, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Text', index=14, number=14, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UUID', index=15, number=15, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DataSet', index=16, number=16, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Bytes', index=17, number=17, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='File', index=18, number=18, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Template', index=19, number=19, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PropertySet', index=20, number=20, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PropertySetList', index=21, number=21, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Int8Array', 
index=22, number=22, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Int16Array', index=23, number=23, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Int32Array', index=24, number=24, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Int64Array', index=25, number=25, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UInt8Array', index=26, number=26, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UInt16Array', index=27, number=27, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UInt32Array', index=28, number=28, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UInt64Array', index=29, number=29, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FloatArray', index=30, number=30, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DoubleArray', index=31, number=31, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BooleanArray', index=32, number=32, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='StringArray', index=33, number=33, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DateTimeArray', index=34, number=34, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=2850, + serialized_end=3348, +) +_sym_db.RegisterEnumDescriptor(_DATATYPE) + +DataType = enum_type_wrapper.EnumTypeWrapper(_DATATYPE) +Unknown = 0 +Int8 = 1 +Int16 = 2 +Int32 = 3 +Int64 = 4 +UInt8 = 5 +UInt16 = 6 +UInt32 = 7 +UInt64 = 8 +Float = 9 +Double = 10 +Boolean = 11 +String = 12 +DateTime = 13 +Text = 14 +UUID = 15 +DataSet = 16 +Bytes = 17 +File = 18 +Template = 19 +PropertySet = 20 +PropertySetList = 21 +Int8Array = 22 
+Int16Array = 23 +Int32Array = 24 +Int64Array = 25 +UInt8Array = 26 +UInt16Array = 27 +UInt32Array = 28 +UInt64Array = 29 +FloatArray = 30 +DoubleArray = 31 +BooleanArray = 32 +StringArray = 33 +DateTimeArray = 34 @@ -39,7 +229,7 @@ nested_types=[], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(1, 536870912), ], @@ -62,70 +252,70 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='type', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.type', index=1, number=2, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='int_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.int_value', index=2, number=3, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='long_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.long_value', index=3, number=4, type=4, cpp_type=4, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='float_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.float_value', index=4, number=5, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, 
enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='double_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.double_value', index=5, number=6, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='boolean_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.boolean_value', index=6, number=7, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='string_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.string_value', index=7, number=8, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='extension_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.extension_value', index=8, number=9, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_PAYLOAD_TEMPLATE_PARAMETER_PARAMETERVALUEEXTENSION, ], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], @@ -151,42 +341,42 @@ has_default_value=False, default_value=_b("").decode('utf-8'), 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='metrics', full_name='org.eclipse.tahu.protobuf.Payload.Template.metrics', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='parameters', full_name='org.eclipse.tahu.protobuf.Payload.Template.parameters', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='template_ref', full_name='org.eclipse.tahu.protobuf.Payload.Template.template_ref', index=3, number=4, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='is_definition', full_name='org.eclipse.tahu.protobuf.Payload.Template.is_definition', index=4, number=5, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_PAYLOAD_TEMPLATE_PARAMETER, ], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(6, 536870912), ], @@ -209,7 +399,7 @@ nested_types=[], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=True, syntax='proto2', 
extension_ranges=[(1, 536870912), ], @@ -232,56 +422,56 @@ has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='long_value', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.long_value', index=1, number=2, type=4, cpp_type=4, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='float_value', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.float_value', index=2, number=3, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='double_value', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.double_value', index=3, number=4, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='boolean_value', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.boolean_value', index=4, number=5, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='string_value', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.string_value', index=5, number=6, type=9, cpp_type=9, label=1, 
has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='extension_value', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.extension_value', index=6, number=7, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_PAYLOAD_DATASET_DATASETVALUE_DATASETVALUEEXTENSION, ], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], @@ -307,14 +497,14 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(2, 536870912), ], @@ -337,35 +527,35 @@ has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='columns', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.columns', index=1, number=2, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='types', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.types', index=2, number=3, type=13, cpp_type=3, label=3, has_default_value=False, default_value=[], 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='rows', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.rows', index=3, number=4, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_PAYLOAD_DATASET_DATASETVALUE, _PAYLOAD_DATASET_ROW, ], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(5, 536870912), ], @@ -388,7 +578,7 @@ nested_types=[], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(1, 536870912), ], @@ -411,84 +601,84 @@ has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='is_null', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.is_null', index=1, number=2, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='int_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.int_value', index=2, number=3, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='long_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.long_value', 
index=3, number=4, type=4, cpp_type=4, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='float_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.float_value', index=4, number=5, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='double_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.double_value', index=5, number=6, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='boolean_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.boolean_value', index=6, number=7, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='string_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.string_value', index=7, number=8, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='propertyset_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.propertyset_value', index=8, number=9, type=11, cpp_type=10, label=1, has_default_value=False, 
default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='propertysets_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.propertysets_value', index=9, number=10, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='extension_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.extension_value', index=10, number=11, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_PAYLOAD_PROPERTYVALUE_PROPERTYVALUEEXTENSION, ], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], @@ -514,21 +704,21 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='values', full_name='org.eclipse.tahu.protobuf.Payload.PropertySet.values', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(3, 536870912), ], @@ -551,14 +741,14 @@ has_default_value=False, default_value=[], 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(2, 536870912), ], @@ -581,63 +771,63 @@ has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='content_type', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.content_type', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='size', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.size', index=2, number=3, type=4, cpp_type=4, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='seq', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.seq', index=3, number=4, type=4, cpp_type=4, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='file_name', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.file_name', index=4, number=5, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + 
serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='file_type', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.file_type', index=5, number=6, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='md5', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.md5', index=6, number=7, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='description', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.description', index=7, number=8, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(9, 536870912), ], @@ -660,7 +850,7 @@ nested_types=[], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(1, 536870912), ], @@ -683,140 +873,140 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='alias', full_name='org.eclipse.tahu.protobuf.Payload.Metric.alias', index=1, number=2, type=4, cpp_type=4, label=1, has_default_value=False, default_value=0, message_type=None, 
enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='timestamp', full_name='org.eclipse.tahu.protobuf.Payload.Metric.timestamp', index=2, number=3, type=4, cpp_type=4, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='datatype', full_name='org.eclipse.tahu.protobuf.Payload.Metric.datatype', index=3, number=4, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='is_historical', full_name='org.eclipse.tahu.protobuf.Payload.Metric.is_historical', index=4, number=5, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='is_transient', full_name='org.eclipse.tahu.protobuf.Payload.Metric.is_transient', index=5, number=6, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='is_null', full_name='org.eclipse.tahu.protobuf.Payload.Metric.is_null', index=6, number=7, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), 
_descriptor.FieldDescriptor( name='metadata', full_name='org.eclipse.tahu.protobuf.Payload.Metric.metadata', index=7, number=8, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='properties', full_name='org.eclipse.tahu.protobuf.Payload.Metric.properties', index=8, number=9, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='int_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.int_value', index=9, number=10, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='long_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.long_value', index=10, number=11, type=4, cpp_type=4, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='float_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.float_value', index=11, number=12, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='double_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.double_value', index=12, number=13, type=1, cpp_type=5, 
label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='boolean_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.boolean_value', index=13, number=14, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='string_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.string_value', index=14, number=15, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bytes_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.bytes_value', index=15, number=16, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dataset_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.dataset_value', index=16, number=17, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='template_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.template_value', index=17, number=18, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='extension_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.extension_value', index=18, number=19, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_PAYLOAD_METRIC_METRICVALUEEXTENSION, ], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], @@ -842,42 +1032,42 @@ has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='metrics', full_name='org.eclipse.tahu.protobuf.Payload.metrics', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='seq', full_name='org.eclipse.tahu.protobuf.Payload.seq', index=2, number=3, type=4, cpp_type=4, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='uuid', full_name='org.eclipse.tahu.protobuf.Payload.uuid', index=3, number=4, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), 
_descriptor.FieldDescriptor( name='body', full_name='org.eclipse.tahu.protobuf.Payload.body', index=4, number=5, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_PAYLOAD_TEMPLATE, _PAYLOAD_DATASET, _PAYLOAD_PROPERTYVALUE, _PAYLOAD_PROPERTYSET, _PAYLOAD_PROPERTYSETLIST, _PAYLOAD_METADATA, _PAYLOAD_METRIC, ], enum_types=[ ], - options=None, + serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(6, 536870912), ], @@ -1018,6 +1208,8 @@ _PAYLOAD_METRIC.fields_by_name['extension_value'].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name['value'] _PAYLOAD.fields_by_name['metrics'].message_type = _PAYLOAD_METRIC DESCRIPTOR.message_types_by_name['Payload'] = _PAYLOAD +DESCRIPTOR.enum_types_by_name['DataType'] = _DATATYPE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) Payload = _reflection.GeneratedProtocolMessageType('Payload', (_message.Message,), dict( @@ -1139,6 +1331,5 @@ _sym_db.RegisterMessage(Payload.Metric.MetricValueExtension) -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\031org.eclipse.tahu.protobufB\017SparkplugBProto')) +DESCRIPTOR._options = None # @@protoc_insertion_point(module_scope) diff --git a/python/examples/example.py b/python/examples/example.py deleted file mode 100755 index 107e5acc..00000000 --- a/python/examples/example.py +++ /dev/null @@ -1,283 +0,0 @@ -#!/usr/bin/python -#/******************************************************************************** -# * Copyright (c) 2014, 2018 Cirrus Link Solutions and others -# * -# * This program and the accompanying materials are made available under the -# * terms of the Eclipse Public License 2.0 which is available at -# * http://www.eclipse.org/legal/epl-2.0. 
-# * -# * SPDX-License-Identifier: EPL-2.0 -# * -# * Contributors: -# * Cirrus Link Solutions - initial implementation -# ********************************************************************************/ -import sys -sys.path.insert(0, "../core/") -#print(sys.path) - -import paho.mqtt.client as mqtt -import sparkplug_b as sparkplug -import time -import random -import string - -from sparkplug_b import * - -# Application Variables -serverUrl = "localhost" -myGroupId = "Sparkplug B Devices" -myNodeName = "Python Edge Node 1" -myDeviceName = "Emulated Device" -publishPeriod = 5000 -myUsername = "admin" -myPassword = "changeme" - -class AliasMap: - Next_Server = 0 - Rebirth = 1 - Reboot = 2 - Dataset = 3 - Node_Metric0 = 4 - Node_Metric1 = 5 - Node_Metric2 = 6 - Node_Metric3 = 7 - Device_Metric0 = 8 - Device_Metric1 = 9 - Device_Metric2 = 10 - Device_Metric3 = 11 - My_Custom_Motor = 12 - -###################################################################### -# The callback for when the client receives a CONNACK response from the server. -###################################################################### -def on_connect(client, userdata, flags, rc): - if rc == 0: - print("Connected with result code "+str(rc)) - else: - print("Failed to connect with result code "+str(rc)) - sys.exit() - - global myGroupId - global myNodeName - - # Subscribing in on_connect() means that if we lose the connection and - # reconnect then subscriptions will be renewed. - client.subscribe("spBv1.0/" + myGroupId + "/NCMD/" + myNodeName + "/#") - client.subscribe("spBv1.0/" + myGroupId + "/DCMD/" + myNodeName + "/#") -###################################################################### - -###################################################################### -# The callback for when a PUBLISH message is received from the server. 
-###################################################################### -def on_message(client, userdata, msg): - print("Message arrived: " + msg.topic) - tokens = msg.topic.split("/") - - if tokens[0] == "spBv1.0" and tokens[1] == myGroupId and (tokens[2] == "NCMD" or tokens[2] == "DCMD") and tokens[3] == myNodeName: - inboundPayload = sparkplug_b_pb2.Payload() - inboundPayload.ParseFromString(msg.payload) - for metric in inboundPayload.metrics: - if metric.name == "Node Control/Next Server" or metric.alias == AliasMap.Next_Server: - # 'Node Control/Next Server' is an NCMD used to tell the device/client application to - # disconnect from the current MQTT server and connect to the next MQTT server in the - # list of available servers. This is used for clients that have a pool of MQTT servers - # to connect to. - print( "'Node Control/Next Server' is not implemented in this example") - elif metric.name == "Node Control/Rebirth" or metric.alias == AliasMap.Rebirth: - # 'Node Control/Rebirth' is an NCMD used to tell the device/client application to resend - # its full NBIRTH and DBIRTH again. MQTT Engine will send this NCMD to a device/client - # application if it receives an NDATA or DDATA with a metric that was not published in the - # original NBIRTH or DBIRTH. This is why the application must send all known metrics in - # its original NBIRTH and DBIRTH messages. - publishBirth() - elif metric.name == "Node Control/Reboot" or metric.alias == AliasMap.Reboot: - # 'Node Control/Reboot' is an NCMD used to tell a device/client application to reboot - # This can be used for devices that need a full application reset via a soft reboot. - # In this case, we fake a full reboot with a republishing of the NBIRTH and DBIRTH - # messages. - publishBirth() - elif metric.name == "output/Device Metric2" or metric.alias == AliasMap.Device_Metric2: - # This is a metric we declared in our DBIRTH message and we're emulating an output. 
- # So, on incoming 'writes' to the output we must publish a DDATA with the new output - # value. If this were a real output we'd write to the output and then read it back - # before publishing a DDATA message. - - # We know this is an Int16 because of how we declated it in the DBIRTH - newValue = metric.int_value - print( "CMD message for output/Device Metric2 - New Value: {}".format(newValue)) - - # Create the DDATA payload - Use the alias because this isn't the DBIRTH - payload = sparkplug.getDdataPayload() - addMetric(payload, None, AliasMap.Device_Metric2, MetricDataType.Int16, newValue) - - # Publish a message data - byteArray = bytearray(payload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/DDATA/" + myNodeName + "/" + myDeviceName, byteArray, 0, False) - elif metric.name == "output/Device Metric3" or metric.alias == AliasMap.Device_Metric3: - # This is a metric we declared in our DBIRTH message and we're emulating an output. - # So, on incoming 'writes' to the output we must publish a DDATA with the new output - # value. If this were a real output we'd write to the output and then read it back - # before publishing a DDATA message. 
- - # We know this is an Boolean because of how we declated it in the DBIRTH - newValue = metric.boolean_value - print( "CMD message for output/Device Metric3 - New Value: %r" % newValue) - - # Create the DDATA payload - use the alias because this isn't the DBIRTH - payload = sparkplug.getDdataPayload() - addMetric(payload, None, AliasMap.Device_Metric3, MetricDataType.Boolean, newValue) - - # Publish a message data - byteArray = bytearray(payload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/DDATA/" + myNodeName + "/" + myDeviceName, byteArray, 0, False) - else: - print( "Unknown command: " + metric.name) - else: - print( "Unknown command...") - - print( "Done publishing") -###################################################################### - -###################################################################### -# Publish the BIRTH certificates -###################################################################### -def publishBirth(): - publishNodeBirth() - publishDeviceBirth() -###################################################################### - -###################################################################### -# Publish the NBIRTH certificate -###################################################################### -def publishNodeBirth(): - print( "Publishing Node Birth") - - # Create the node birth payload - payload = sparkplug.getNodeBirthPayload() - - # Set up the Node Controls - addMetric(payload, "Node Control/Next Server", AliasMap.Next_Server, MetricDataType.Boolean, False) - addMetric(payload, "Node Control/Rebirth", AliasMap.Rebirth, MetricDataType.Boolean, False) - addMetric(payload, "Node Control/Reboot", AliasMap.Reboot, MetricDataType.Boolean, False) - - # Add some regular node metrics - addMetric(payload, "Node Metric0", AliasMap.Node_Metric0, MetricDataType.String, "hello node") - addMetric(payload, "Node Metric1", AliasMap.Node_Metric1, MetricDataType.Boolean, True) - addNullMetric(payload, "Node Metric3", 
AliasMap.Node_Metric3, MetricDataType.Int32) - - # Create a DataSet (012 - 345) two rows with Int8, Int16, and Int32 contents and headers Int8s, Int16s, Int32s and add it to the payload - columns = ["Int8s", "Int16s", "Int32s"] - types = [DataSetDataType.Int8, DataSetDataType.Int16, DataSetDataType.Int32] - dataset = initDatasetMetric(payload, "DataSet", AliasMap.Dataset, columns, types) - row = dataset.rows.add() - element = row.elements.add(); - element.int_value = 0 - element = row.elements.add(); - element.int_value = 1 - element = row.elements.add(); - element.int_value = 2 - row = dataset.rows.add() - element = row.elements.add(); - element.int_value = 3 - element = row.elements.add(); - element.int_value = 4 - element = row.elements.add(); - element.int_value = 5 - - # Add a metric with a custom property - metric = addMetric(payload, "Node Metric2", AliasMap.Node_Metric2, MetricDataType.Int16, 13) - metric.properties.keys.extend(["engUnit"]) - propertyValue = metric.properties.values.add() - propertyValue.type = ParameterDataType.String - propertyValue.string_value = "MyCustomUnits" - - # Create the UDT definition value which includes two UDT members and a single parameter and add it to the payload - template = initTemplateMetric(payload, "_types_/Custom_Motor", None, None) # No alias for Template definitions - templateParameter = template.parameters.add() - templateParameter.name = "Index" - templateParameter.type = ParameterDataType.String - templateParameter.string_value = "0" - addMetric(template, "RPMs", None, MetricDataType.Int32, 0) # No alias in UDT members - addMetric(template, "AMPs", None, MetricDataType.Int32, 0) # No alias in UDT members - - # Publish the node birth certificate - byteArray = bytearray(payload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/NBIRTH/" + myNodeName, byteArray, 0, False) -###################################################################### - 
-###################################################################### -# Publish the DBIRTH certificate -###################################################################### -def publishDeviceBirth(): - print( "Publishing Device Birth") - - # Get the payload - payload = sparkplug.getDeviceBirthPayload() - - # Add some device metrics - addMetric(payload, "input/Device Metric0", AliasMap.Device_Metric0, MetricDataType.String, "hello device") - addMetric(payload, "input/Device Metric1", AliasMap.Device_Metric1, MetricDataType.Boolean, True) - addMetric(payload, "output/Device Metric2", AliasMap.Device_Metric2, MetricDataType.Int16, 16) - addMetric(payload, "output/Device Metric3", AliasMap.Device_Metric3, MetricDataType.Boolean, True) - - # Create the UDT definition value which includes two UDT members and a single parameter and add it to the payload - template = initTemplateMetric(payload, "My_Custom_Motor", AliasMap.My_Custom_Motor, "Custom_Motor") - templateParameter = template.parameters.add() - templateParameter.name = "Index" - templateParameter.type = ParameterDataType.String - templateParameter.string_value = "1" - addMetric(template, "RPMs", None, MetricDataType.Int32, 123) # No alias in UDT members - addMetric(template, "AMPs", None, MetricDataType.Int32, 456) # No alias in UDT members - - # Publish the initial data with the Device BIRTH certificate - totalByteArray = bytearray(payload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/DBIRTH/" + myNodeName + "/" + myDeviceName, totalByteArray, 0, False) -###################################################################### - -###################################################################### -# Main Application -###################################################################### -print("Starting main application") - -# Create the node death payload -deathPayload = sparkplug.getNodeDeathPayload() - -# Start of main program - Set up the MQTT client connection -client = 
mqtt.Client(serverUrl, 1883, 60) -client.on_connect = on_connect -client.on_message = on_message -client.username_pw_set(myUsername, myPassword) -deathByteArray = bytearray(deathPayload.SerializeToString()) -client.will_set("spBv1.0/" + myGroupId + "/NDEATH/" + myNodeName, deathByteArray, 0, False) -client.connect(serverUrl, 1883, 60) - -# Short delay to allow connect callback to occur -time.sleep(.1) -client.loop() - -# Publish the birth certificates -publishBirth() - -while True: - # Periodically publish some new data - payload = sparkplug.getDdataPayload() - - # Add some random data to the inputs - addMetric(payload, None, AliasMap.Device_Metric0, MetricDataType.String, ''.join(random.choice(string.ascii_lowercase) for i in range(12))) - - # Note this data we're setting to STALE via the propertyset as an example - metric = addMetric(payload, None, AliasMap.Device_Metric1, MetricDataType.Boolean, random.choice([True, False])) - metric.properties.keys.extend(["Quality"]) - propertyValue = metric.properties.values.add() - propertyValue.type = ParameterDataType.Int32 - propertyValue.int_value = 500 - - # Publish a message data - byteArray = bytearray(payload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/DDATA/" + myNodeName + "/" + myDeviceName, byteArray, 0, False) - - # Sit and wait for inbound or outbound events - for _ in range(5): - time.sleep(.1) - client.loop() -###################################################################### diff --git a/python/examples/example_raspberry_pi.py b/python/examples/example_raspberry_pi.py deleted file mode 100755 index afd4a9f3..00000000 --- a/python/examples/example_raspberry_pi.py +++ /dev/null @@ -1,247 +0,0 @@ -#!/usr/bin/python -#/******************************************************************************** -# * Copyright (c) 2014, 2018 Cirrus Link Solutions and others -# * -# * This program and the accompanying materials are made available under the -# * terms of the Eclipse Public License 2.0 
which is available at -# * http://www.eclipse.org/legal/epl-2.0. -# * -# * SPDX-License-Identifier: EPL-2.0 -# * -# * Contributors: -# * Cirrus Link Solutions - initial implementation -# ********************************************************************************/ -import sys -sys.path.insert(0, "client_lib") - -import paho.mqtt.client as mqtt -import pibrella -import sparkplug_b as sparkplug -import time -import random -import subprocess - -from sparkplug_b import * -from threading import Lock - -serverUrl = "192.168.1.53" -myGroupId = "Sparkplug B Devices" -myNodeName = "Python Raspberry Pi" -mySubNodeName = "Pibrella" -myUsername = "admin" -myPassword = "changeme" -lock = Lock() - -###################################################################### -# Button press event handler -###################################################################### -def button_changed(pin): - outboundPayload = sparkplug.getDdataPayload() - buttonValue = pin.read() - if buttonValue == 1: - print("You pressed the button!") - else: - print("You released the button!") - addMetric(outboundPayload, "button", None, MetricDataType.Boolean, buttonValue); - byteArray = bytearray(outboundPayload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/DDATA/" + myNodeName + "/" + mySubNodeName, byteArray, 0, False) - -###################################################################### -# Input change event handler -###################################################################### -def input_a_changed(pin): - input_changed("Inputs/a", pin) -def input_b_changed(pin): - input_changed("Inputs/b", pin) -def input_c_changed(pin): - input_changed("Inputs/c", pin) -def input_d_changed(pin): - input_changed("Inputs/d", pin) -def input_changed(name, pin): - lock.acquire() - try: - # Lock the block around the callback handler to prevent inproper access based on debounce - outboundPayload = sparkplug.getDdataPayload() - addMetric(outboundPayload, name, None, 
MetricDataType.Boolean, pin.read()); - byteArray = bytearray(outboundPayload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/DDATA/" + myNodeName + "/" + mySubNodeName, byteArray, 0, False) - finally: - lock.release() -###################################################################### - -###################################################################### -# The callback for when the client receives a CONNACK response from the server. -###################################################################### -def on_connect(client, userdata, flags, rc): - global myGroupId - global myNodeName - print("Connected with result code "+str(rc)) - - # Subscribing in on_connect() means that if we lose the connection and - # reconnect then subscriptions will be renewed. - client.subscribe("spBv1.0/" + myGroupId + "/NCMD/" + myNodeName + "/#") - client.subscribe("spBv1.0/" + myGroupId + "/DCMD/" + myNodeName + "/#") -###################################################################### - -###################################################################### -# The callback for when a PUBLISH message is received from the server. 
-###################################################################### -def on_message(client, userdata, msg): - print("Message arrived: " + msg.topic) - tokens = msg.topic.split("/") - - if tokens[0] == "spBv1.0" and tokens[1] == myGroupId and tokens[2] == "DCMD" and tokens[3] == myNodeName: - inboundPayload = sparkplug_b_pb2.Payload() - inboundPayload.ParseFromString(msg.payload) - outboundPayload = sparkplug.getDdataPayload() - - for metric in inboundPayload.metrics: - print "Tag Name: " + metric.name - if metric.name == "Outputs/e": - pibrella.output.e.write(metric.boolean_value) - addMetric(outboundPayload, "Outputs/e", None, MetricDataType.Boolean, pibrella.output.e.read()) - elif metric.name == "Outputs/f": - pibrella.output.f.write(metric.boolean_value) - addMetric(outboundPayload, "Outputs/f", None, MetricDataType.Boolean, pibrella.output.f.read()) - elif metric.name == "Outputs/g": - pibrella.output.g.write(metric.boolean_value) - addMetric(outboundPayload, "Outputs/g", None, MetricDataType.Boolean, pibrella.output.g.read()) - elif metric.name == "Outputs/h": - pibrella.output.h.write(metric.boolean_value) - addMetric(outboundPayload, "Outputs/h", None, MetricDataType.Boolean, pibrella.output.h.read()) - elif metric.name == "Outputs/LEDs/green": - if metric.boolean_value: - pibrella.light.green.on() - else: - pibrella.light.green.off() - addMetric(outboundPayload, "Outputs/LEDs/green", None, MetricDataType.Boolean, pibrella.light.green.read()) - elif metric.name == "Outputs/LEDs/red": - if metric.boolean_value: - pibrella.light.red.on() - else: - pibrella.light.red.off() - addMetric(outboundPayload, "Outputs/LEDs/red", None, MetricDataType.Boolean, pibrella.light.red.read()) - elif metric.name == "Outputs/LEDs/yellow": - if metric.boolean_value: - pibrella.light.yellow.on() - else: - pibrella.light.yellow.off() - addMetric(outboundPayload, "Outputs/LEDs/yellow", None, MetricDataType.Boolean, pibrella.light.yellow.read()) - elif metric.name == 
"buzzer_fail": - pibrella.buzzer.fail() - elif metric.name == "buzzer_success": - pibrella.buzzer.success() - - byteArray = bytearray(outboundPayload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/DDATA/" + myNodeName + "/" + mySubNodeName, byteArray, 0, False) - elif tokens[0] == "spBv1.0" and tokens[1] == myGroupId and tokens[2] == "NCMD" and tokens[3] == myNodeName: - inboundPayload = sparkplug_b_pb2.Payload() - inboundPayload.ParseFromString(msg.payload) - for metric in inboundPayload.metrics: - if metric.name == "Node Control/Next Server": - publishBirths() - if metric.name == "Node Control/Rebirth": - publishBirths() - if metric.name == "Node Control/Reboot": - publishBirths() - else: - print "Unknown command..." - - print "done publishing" -###################################################################### - -###################################################################### -# Publish the Birth certificate -###################################################################### -def publishBirths(): - print("Publishing Birth") - - # Create the NBIRTH payload - payload = sparkplug.getNodeBirthPayload() - - # Add the Node Controls - addMetric(payload, "Node Control/Next Server", None, MetricDataType.Boolean, False) - addMetric(payload, "Node Control/Rebirth", None, MetricDataType.Boolean, False) - addMetric(payload, "Node Control/Reboot", None, MetricDataType.Boolean, False) - - # Set up the device Parameters - p = subprocess.Popen('uname -a', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - for line in p.stdout.readlines(): - unameOutput = line, - retVal = p.wait() - p = subprocess.Popen('cat /proc/cpuinfo | grep Hardware', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - for line in p.stdout.readlines(): - hardwareOutput = line, - retVal = p.wait() - p = subprocess.Popen('cat /proc/cpuinfo | grep Revision', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - for line in p.stdout.readlines(): 
- revisionOutput = line, - retVal = p.wait() - p = subprocess.Popen('cat /proc/cpuinfo | grep Serial', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - for line in p.stdout.readlines(): - serialOutput = line, - retVal = p.wait() - addMetric(payload, "Parameters/sw_version", None, MetricDataType.String, ''.join(unameOutput)) - addMetric(payload, "Parameters/hw_version", None, MetricDataType.String, ''.join(hardwareOutput)) - addMetric(payload, "Parameters/hw_revision", None, MetricDataType.String, ''.join(revisionOutput)) - addMetric(payload, "Parameters/hw_serial", None, MetricDataType.String, ''.join(serialOutput)) - - # Publish the NBIRTH certificate - byteArray = bytearray(payload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/NBIRTH/" + myNodeName, byteArray, 0, False) - - # Set up the DBIRTH with the input metrics - payload = sparkplug.getDeviceBirthPayload() - - addMetric(payload, "Inputs/a", None, MetricDataType.Boolean, pibrella.input.a.read()) - addMetric(payload, "Inputs/b", None, MetricDataType.Boolean, pibrella.input.b.read()) - addMetric(payload, "Inputs/c", None, MetricDataType.Boolean, pibrella.input.c.read()) - addMetric(payload, "Inputs/d", None, MetricDataType.Boolean, pibrella.input.d.read()) - - # Set up the output states on first run so Ignition and MQTT Engine are aware of them - addMetric(payload, "Outputs/e", None, MetricDataType.Boolean, pibrella.output.e.read()) - addMetric(payload, "Outputs/f", None, MetricDataType.Boolean, pibrella.output.f.read()) - addMetric(payload, "Outputs/g", None, MetricDataType.Boolean, pibrella.output.g.read()) - addMetric(payload, "Outputs/h", None, MetricDataType.Boolean, pibrella.output.h.read()) - addMetric(payload, "Outputs/LEDs/green", None, MetricDataType.Boolean, pibrella.light.green.read()) - addMetric(payload, "Outputs/LEDs/red", None, MetricDataType.Boolean, pibrella.light.red.read()) - addMetric(payload, "Outputs/LEDs/yellow", None, MetricDataType.Boolean, 
pibrella.light.yellow.read()) - addMetric(payload, "button", None, MetricDataType.Boolean, pibrella.button.read()) - addMetric(payload, "buzzer_fail", None, MetricDataType.Boolean, 0) - addMetric(payload, "buzzer_success", None, MetricDataType.Boolean, 0) - - # Publish the initial data with the DBIRTH certificate - totalByteArray = bytearray(payload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/DBIRTH/" + myNodeName + "/" + mySubNodeName, totalByteArray, 0, False) -###################################################################### - -# Create the NDEATH payload -deathPayload = sparkplug.getNodeDeathPayload() - -# Start of main program - Set up the MQTT client connection -client = mqtt.Client(serverUrl, 1883, 60) -client.on_connect = on_connect -client.on_message = on_message -client.username_pw_set(myUsername, myPassword) -deathByteArray = bytearray(deathPayload.SerializeToString()) -client.will_set("spBv1.0/" + myGroupId + "/NDEATH/" + myNodeName, deathByteArray, 0, False) -client.connect(serverUrl, 1883, 60) - -# Short delay to allow connect callback to occur -time.sleep(.1) -client.loop() - -publishBirths() - -# Set up the button press event handler -pibrella.button.changed(button_changed) -pibrella.input.a.changed(input_a_changed) -pibrella.input.b.changed(input_b_changed) -pibrella.input.c.changed(input_c_changed) -pibrella.input.d.changed(input_d_changed) - -# Sit and wait for inbound or outbound events -while True: - time.sleep(.1) - client.loop() - diff --git a/python/examples/example_simple.py b/python/examples/example_simple.py deleted file mode 100755 index 9207be46..00000000 --- a/python/examples/example_simple.py +++ /dev/null @@ -1,269 +0,0 @@ -#!/usr/bin/python -#/******************************************************************************** -# * Copyright (c) 2014, 2018 Cirrus Link Solutions and others -# * -# * This program and the accompanying materials are made available under the -# * terms of the Eclipse Public License 
2.0 which is available at -# * http://www.eclipse.org/legal/epl-2.0. -# * -# * SPDX-License-Identifier: EPL-2.0 -# * -# * Contributors: -# * Cirrus Link Solutions - initial implementation -# ********************************************************************************/ -import sys -sys.path.insert(0, "../core/") -#print(sys.path) - -import paho.mqtt.client as mqtt -import sparkplug_b as sparkplug -import time -import random -import string - -from sparkplug_b import * - -# Application Variables -serverUrl = "localhost" -myGroupId = "Sparkplug B Devices" -myNodeName = "Python Edge Node 1" -myDeviceName = "Emulated Device" -publishPeriod = 5000 -myUsername = "admin" -myPassword = "changeme" - -###################################################################### -# The callback for when the client receives a CONNACK response from the server. -###################################################################### -def on_connect(client, userdata, flags, rc): - if rc == 0: - print("Connected with result code "+str(rc)) - else: - print("Failed to connect with result code "+str(rc)) - sys.exit() - - global myGroupId - global myNodeName - - # Subscribing in on_connect() means that if we lose the connection and - # reconnect then subscriptions will be renewed. - client.subscribe("spBv1.0/" + myGroupId + "/NCMD/" + myNodeName + "/#") - client.subscribe("spBv1.0/" + myGroupId + "/DCMD/" + myNodeName + "/#") -###################################################################### - -###################################################################### -# The callback for when a PUBLISH message is received from the server. 
-###################################################################### -def on_message(client, userdata, msg): - print("Message arrived: " + msg.topic) - tokens = msg.topic.split("/") - - if tokens[0] == "spBv1.0" and tokens[1] == myGroupId and (tokens[2] == "NCMD" or tokens[2] == "DCMD") and tokens[3] == myNodeName: - inboundPayload = sparkplug_b_pb2.Payload() - inboundPayload.ParseFromString(msg.payload) - for metric in inboundPayload.metrics: - if metric.name == "Node Control/Next Server": - # 'Node Control/Next Server' is an NCMD used to tell the device/client application to - # disconnect from the current MQTT server and connect to the next MQTT server in the - # list of available servers. This is used for clients that have a pool of MQTT servers - # to connect to. - print( "'Node Control/Next Server' is not implemented in this example") - elif metric.name == "Node Control/Rebirth": - # 'Node Control/Rebirth' is an NCMD used to tell the device/client application to resend - # its full NBIRTH and DBIRTH again. MQTT Engine will send this NCMD to a device/client - # application if it receives an NDATA or DDATA with a metric that was not published in the - # original NBIRTH or DBIRTH. This is why the application must send all known metrics in - # its original NBIRTH and DBIRTH messages. - publishBirth() - elif metric.name == "Node Control/Reboot": - # 'Node Control/Reboot' is an NCMD used to tell a device/client application to reboot - # This can be used for devices that need a full application reset via a soft reboot. - # In this case, we fake a full reboot with a republishing of the NBIRTH and DBIRTH - # messages. - publishBirth() - elif metric.name == "output/Device Metric2": - # This is a metric we declared in our DBIRTH message and we're emulating an output. - # So, on incoming 'writes' to the output we must publish a DDATA with the new output - # value. 
If this were a real output we'd write to the output and then read it back - # before publishing a DDATA message. - - # We know this is an Int16 because of how we declated it in the DBIRTH - newValue = metric.int_value - print( "CMD message for output/Device Metric2 - New Value: {}".format(newValue)) - - # Create the DDATA payload - payload = sparkplug.getDdataPayload() - addMetric(payload, None, None, MetricDataType.Int16, newValue) - - # Publish a message data - byteArray = bytearray(payload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/DDATA/" + myNodeName + "/" + myDeviceName, byteArray, 0, False) - elif metric.name == "output/Device Metric3": - # This is a metric we declared in our DBIRTH message and we're emulating an output. - # So, on incoming 'writes' to the output we must publish a DDATA with the new output - # value. If this were a real output we'd write to the output and then read it back - # before publishing a DDATA message. - - # We know this is an Boolean because of how we declated it in the DBIRTH - newValue = metric.boolean_value - print( "CMD message for output/Device Metric3 - New Value: %r" % newValue) - - # Create the DDATA payload - payload = sparkplug.getDdataPayload() - addMetric(payload, None, None, MetricDataType.Boolean, newValue) - - # Publish a message data - byteArray = bytearray(payload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/DDATA/" + myNodeName + "/" + myDeviceName, byteArray, 0, False) - else: - print( "Unknown command: " + metric.name) - else: - print( "Unknown command...") - - print( "Done publishing") -###################################################################### - -###################################################################### -# Publish the BIRTH certificates -###################################################################### -def publishBirth(): - publishNodeBirth() - publishDeviceBirth() -###################################################################### - 
-###################################################################### -# Publish the NBIRTH certificate -###################################################################### -def publishNodeBirth(): - print( "Publishing Node Birth") - - # Create the node birth payload - payload = sparkplug.getNodeBirthPayload() - - # Set up the Node Controls - addMetric(payload, "Node Control/Next Server", None, MetricDataType.Boolean, False) - addMetric(payload, "Node Control/Rebirth", None, MetricDataType.Boolean, False) - addMetric(payload, "Node Control/Reboot", None, MetricDataType.Boolean, False) - - # Add some regular node metrics - addMetric(payload, "Node Metric0", None, MetricDataType.String, "hello node") - addMetric(payload, "Node Metric1", None, MetricDataType.Boolean, True) - addNullMetric(payload, "Node Metric3", None, MetricDataType.Int32) - - # Create a DataSet (012 - 345) two rows with Int8, Int16, and Int32 contents and headers Int8s, Int16s, Int32s and add it to the payload - columns = ["Int8s", "Int16s", "Int32s"] - types = [DataSetDataType.Int8, DataSetDataType.Int16, DataSetDataType.Int32] - dataset = initDatasetMetric(payload, "DataSet", None, columns, types) - row = dataset.rows.add() - element = row.elements.add(); - element.int_value = 0 - element = row.elements.add(); - element.int_value = 1 - element = row.elements.add(); - element.int_value = 2 - row = dataset.rows.add() - element = row.elements.add(); - element.int_value = 3 - element = row.elements.add(); - element.int_value = 4 - element = row.elements.add(); - element.int_value = 5 - - # Add a metric with a custom property - metric = addMetric(payload, "Node Metric2", None, MetricDataType.Int16, 13) - metric.properties.keys.extend(["engUnit"]) - propertyValue = metric.properties.values.add() - propertyValue.type = ParameterDataType.String - propertyValue.string_value = "MyCustomUnits" - - # Create the UDT definition value which includes two UDT members and a single parameter and add it to the 
payload - template = initTemplateMetric(payload, "_types_/Custom_Motor", None, None) - templateParameter = template.parameters.add() - templateParameter.name = "Index" - templateParameter.type = ParameterDataType.String - templateParameter.string_value = "0" - addMetric(template, "RPMs", None, MetricDataType.Int32, 0) - addMetric(template, "AMPs", None, MetricDataType.Int32, 0) - - # Publish the node birth certificate - byteArray = bytearray(payload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/NBIRTH/" + myNodeName, byteArray, 0, False) -###################################################################### - -###################################################################### -# Publish the DBIRTH certificate -###################################################################### -def publishDeviceBirth(): - print( "Publishing Device Birth") - - # Get the payload - payload = sparkplug.getDeviceBirthPayload() - - # Add some device metrics - addMetric(payload, "input/Device Metric0", None, MetricDataType.String, "hello device") - addMetric(payload, "input/Device Metric1", None, MetricDataType.Boolean, True) - addMetric(payload, "output/Device Metric2", None, MetricDataType.Int16, 16) - addMetric(payload, "output/Device Metric3", None, MetricDataType.Boolean, True) - addMetric(payload, "DateTime Metric", None, MetricDataType.DateTime, long(time.time() * 1000)) - - # Create the UDT definition value which includes two UDT members and a single parameter and add it to the payload - template = initTemplateMetric(payload, "My_Custom_Motor", None, "Custom_Motor") - templateParameter = template.parameters.add() - templateParameter.name = "Index" - templateParameter.type = ParameterDataType.String - templateParameter.string_value = "1" - addMetric(template, "RPMs", None, MetricDataType.Int32, 123) - addMetric(template, "AMPs", None, MetricDataType.Int32, 456) - - # Publish the initial data with the Device BIRTH certificate - totalByteArray = 
bytearray(payload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/DBIRTH/" + myNodeName + "/" + myDeviceName, totalByteArray, 0, False) -###################################################################### - -###################################################################### -# Main Application -###################################################################### -print("Starting main application") - -# Create the node death payload -deathPayload = sparkplug.getNodeDeathPayload() - -# Start of main program - Set up the MQTT client connection -client = mqtt.Client(serverUrl, 1883, 60) -client.on_connect = on_connect -client.on_message = on_message -client.username_pw_set(myUsername, myPassword) -deathByteArray = bytearray(deathPayload.SerializeToString()) -client.will_set("spBv1.0/" + myGroupId + "/NDEATH/" + myNodeName, deathByteArray, 0, False) -client.connect(serverUrl, 1883, 60) - -# Short delay to allow connect callback to occur -time.sleep(.1) -client.loop() - -# Publish the birth certificates -publishBirth() - -while True: - # Periodically publish some new data - payload = sparkplug.getDdataPayload() - - # Add some random data to the inputs - addMetric(payload, None, None, MetricDataType.String, ''.join(random.choice(string.ascii_lowercase) for i in range(12))) - - # Note this data we're setting to STALE via the propertyset as an example - metric = addMetric(payload, None, None, MetricDataType.Boolean, random.choice([True, False])) - metric.properties.keys.extend(["Quality"]) - propertyValue = metric.properties.values.add() - propertyValue.type = ParameterDataType.Int32 - propertyValue.int_value = 500 - - # Publish a message data - byteArray = bytearray(payload.SerializeToString()) - client.publish("spBv1.0/" + myGroupId + "/DDATA/" + myNodeName + "/" + myDeviceName, byteArray, 0, False) - - # Sit and wait for inbound or outbound events - for _ in range(5): - time.sleep(.1) - client.loop() 
-###################################################################### diff --git a/python/examples/fuller_example.py b/python/examples/fuller_example.py new file mode 100755 index 00000000..b775394e --- /dev/null +++ b/python/examples/fuller_example.py @@ -0,0 +1,212 @@ +#!/usr/bin/env python3 + +############################################################################# +# Copyright (c) 2014, 2018, 2020 Cirrus Link Solutions and others +# +# This program and the accompanying materials are made available under the +# terms of the Eclipse Public License 2.0 which is available at +# http://www.eclipse.org/legal/epl-2.0. +# +# SPDX-License-Identifier: EPL-2.0 +# +# Contributors: +# Cirrus Link Solutions - initial implementation +# Justin Brzozoski @ SignalFire Wireless Telemetry - major rewrite +############################################################################# + +# These are the basic imports required for a Sparkplug Edge Node. +# The logging import is not explicitly necessary, but is highly recommended. +import logging +logging.basicConfig(level=logging.DEBUG) +logger = logging.getLogger('edge_node_example') +logger.info('Starting Python Sparkplug edge node demonstration') + +import time +from datetime import datetime, timezone +import random +import string + +import tahu +import tahu.edge + +### Commonly configured items +my_group_id = "Tahu Sample" +my_node_name = "Edge Node 1" +my_device_name = "Emulated Device" +# You can define multiple connection setups here, and the edge node will rotate through them +# in response to "Next Server" commands. 
+my_mqtt_params = [ + #tahu.mqtt_params('localhost', username='admin', password='changeme'), + #tahu.mqtt_params('securehost', certfile='my_cert.pem', keyfile='my_private.key', tls_enabled=True), + #tahu.mqtt_params('test.mosquitto.org'), + tahu.mqtt_params('broker.hivemq.com'), +] + +def sample_cmd_handler(tag, context, value): + """ + Simplest example of a callback for a metric cmd_handler + + This cmd_handler will log the received info and then echo the new value back over Sparkplug + + :param tag: Metric object that this command was received on + :param context: the optional cmd_context object provided to the Metric when it was created + :param value: the new value received over Sparkplug + + """ + logger.info('sample_cmd_handler tag={} context={} value={}'.format(tag.name, + context, + value)) + tag.change_value(value) + +def fancier_date_handler(tag, context, value): + """ + A simple example of a callback for a datetime-based metric cmd_handler + + This cmd_handler will log the received time and then echo back the current time over Sparkplug + + :param tag: Metric object that this command was received on + :param context: the optional cmd_context object provided to the Metric when it was created + :param value: the new value received over Sparkplug + + """ + dt = datetime.fromtimestamp(tahu.timestamp_from_sparkplug(value), + timezone.utc) + logger.info('fancier_date_handler received {}'.format(str(dt))) + tag.change_value(tahu.timestamp_to_sparkplug()) + +# There is a discrepancy between Ignition as of version 8.1.x (or older) and the Sparkplug spec as of version 2.2. +# The spec says in section 15.2.1 that UInt32 should be stored in the int_value field of the protobuf, +# but Ignition and the reference code have historically stored UInt32 in the long_value field. +# +# Our library is flexible and will accept incoming values from either value field gracefully. +# However, outgoing UInt32 can only be done in one or the other. 
+# +# When you first setup your node, you can pass a u32_in_long parameter to control this behavior for a +# edge node and all devices under it. +# Setting it to True will work in Ignition's style, setting it to False will match the spec's style. +my_edge_node = tahu.edge.Node(my_mqtt_params, my_group_id, + my_node_name, + logger=logger, u32_in_long=True) +my_subdevice = tahu.edge.Device(my_edge_node, my_device_name) + +# Here are examples of how to define one of each of the basic types. +# The value you pass in when creating the metric just sets the initial value. +# Hold onto the return object to be able to adjust the value later. +s8_test_tag = tahu.edge.Metric(my_subdevice, 'int8_test', + tahu.DataType.Int8, value=-1, + cmd_handler=sample_cmd_handler) +s16_test_tag = tahu.edge.Metric(my_subdevice, 'int16_test', + tahu.DataType.Int16, value=-1, + cmd_handler=sample_cmd_handler) +s32_test_tag = tahu.edge.Metric(my_subdevice, 'int32_test', + tahu.DataType.Int32, value=-1, + cmd_handler=sample_cmd_handler) +s64_test_tag = tahu.edge.Metric(my_subdevice, 'int64_test', + tahu.DataType.Int64, value=-1, + cmd_handler=sample_cmd_handler) +u8_test_tag = tahu.edge.Metric(my_subdevice, 'uint8_test', + tahu.DataType.UInt8, value=1, + cmd_handler=sample_cmd_handler) +u16_test_tag = tahu.edge.Metric(my_subdevice, 'uint16_test', + tahu.DataType.UInt16, value=1, + cmd_handler=sample_cmd_handler) +u32_test_tag = tahu.edge.Metric(my_subdevice, 'uint32_test', + tahu.DataType.UInt32, value=1, + cmd_handler=sample_cmd_handler) +u64_test_tag = tahu.edge.Metric(my_subdevice, 'uint64_test', + tahu.DataType.UInt64, value=1, + cmd_handler=sample_cmd_handler) +float_test_tag = tahu.edge.Metric(my_subdevice, 'float_test', + tahu.DataType.Float, value=1.01, + cmd_handler=sample_cmd_handler) +double_test_tag = tahu.edge.Metric(my_subdevice, 'double_test', + tahu.DataType.Double, + value=1.02, + cmd_handler=sample_cmd_handler) +boolean_test_tag = tahu.edge.Metric(my_subdevice, 'boolean_test', 
+ tahu.DataType.Boolean, + value=True, + cmd_handler=sample_cmd_handler) +string_test_tag = tahu.edge.Metric(my_subdevice, 'string_test', + tahu.DataType.String, + value="Hello, world!", + cmd_handler=sample_cmd_handler) +datetime_test_tag = tahu.edge.Metric(my_subdevice, 'datetime_test', + tahu.DataType.DateTime, + value=tahu.timestamp_to_sparkplug(), + cmd_handler=fancier_date_handler) +# If you want the current time use tahu.timestamp_to_sparkplug() without parameters. +# If you want to convert from a datetime, pass in the datetime.timestamp like this: +# sample_datetime = datetime(2006, 11, 21, 16, 30, tzinfo=timezone.utc) +# alternative_time_value = tahu.timestamp_to_sparkplug(sample_datetime.timestamp()) + +# Here are examples of how to use properties. +# Properties are attached to a metric after creating it. +# You can define them one at a time with detailed control using MetricProperty. +# If you don't need as much control over datatypes, you can define a group all at once using bulk_properties. +# And there are ignition_x_property functions for adding well-known properties that Ignition looks for. +# If you have a property that you need to adjust later, hold onto the return object when you create it. 
+property_test_tag = tahu.edge.Metric(my_subdevice, 'property_test', + tahu.DataType.UInt64, + value=23, + cmd_handler=sample_cmd_handler) +tahu.edge.MetricProperty(property_test_tag, 'prop_name', + tahu.DataType.UInt64, value=5, + report_with_data=False) +tahu.edge.bulk_properties(property_test_tag, {'dictstr':'whatever', + 'dictdouble':3.14159, + 'dictint64':64738}) +tahu.edge.ignition_documentation_property(property_test_tag, + 'A tag for demonstrating lots of property samples!') +tahu.edge.ignition_low_property(property_test_tag, 0) +tahu.edge.ignition_high_property(property_test_tag, 10) +tahu.edge.ignition_unit_property(property_test_tag, 'smoots') +property_test_tag_quality = tahu.edge.ignition_quality_property(property_test_tag) + +# Here's an example of a dataset tag. +# Locally, they are handled as tahu.DataSet objects. +# To create a dataset, you first pass in a dict listing column names and datatypes. +# You can then manipulate the data with add_rows, get_rows, remove_rows and other methods. +# After that, the dataset object is passed into the metric value as normal. +sample_dataset = tahu.DataSet({'U32Col':tahu.DataType.UInt32, + 'StrCol':tahu.DataType.String, + 'DoubleCol':tahu.DataType.Double}) +sample_dataset.add_rows([[15, 'Fifteen', 3.14159], [0, 'Zero', 6.07E27], + [65535, 'FunFunFun', (2 / 3)]]) +dataset_test_tag = tahu.edge.Metric(my_subdevice, 'dataset_sample', + tahu.DataType.DataSet, + value=sample_dataset, + cmd_handler=sample_cmd_handler) + +# Now that we've created all of our devices, metrics, and properties, +# we can connect the edge node to the broker. +my_edge_node.online() +while not my_edge_node.is_connected(): + # TODO - Add some sort of timeout feature? + pass +loop_count = 0 +while True: + # Sit and wait for a moment... + time.sleep(5) + + # Send some random data on the string_test tag right away... 
(triggers an immediate data message) + new_string = ''.join(random.sample(string.ascii_lowercase, 12)) + string_test_tag.change_value(new_string) + + # Next, pile up a few changes all on the same subdevice, and trigger a collected + # data message containing all of those manually. (Will not work for tags on different subdevices) + + # Randomly change the quality on the property_test_tag... + new_quality = random.choice([tahu.edge.IgnitionQualityCode.Good, + tahu.edge.IgnitionQualityCode.Error_IO]) + property_test_tag_quality.change_value(new_quality, send_immediate=False) + + # Report how many times we've gone around this loop in the uint8 + u8_test_tag.change_value(loop_count, send_immediate=False) + + # Send any unsent changes + my_subdevice.send_data(changed_only=True) + my_edge_node.send_data(changed_only=True) + + loop_count = loop_count + 1 + diff --git a/python/examples/pibrella_example.py b/python/examples/pibrella_example.py new file mode 100755 index 00000000..d6398886 --- /dev/null +++ b/python/examples/pibrella_example.py @@ -0,0 +1,175 @@ +#!/usr/bin/env python3 + +############################################################################# +# Copyright (c) 2014, 2018, 2020, 2022 Cirrus Link Solutions and others +# +# This program and the accompanying materials are made available under the +# terms of the Eclipse Public License 2.0 which is available at +# http://www.eclipse.org/legal/epl-2.0. 
+# +# SPDX-License-Identifier: EPL-2.0 +# +# Contributors: +# Cirrus Link Solutions - initial implementation +# Justin Brzozoski @ SignalFire Wireless Telemetry - major rewrite +############################################################################# + +# We setup a basic logger for the top-level application, which indirectly +# enables logging output for the tahu modules +import logging +logging.basicConfig(level=logging.DEBUG) + +import tahu +import tahu.edge + +import pibrella +import time +import subprocess + +### Commonly configured items +my_group_id = 'sfdev' +my_node_name = 'Python Raspberry Pi' +my_device_name = 'Pibrella' + +# You can define multiple connection setups here, and the edge node will rotate through them +# in response to "Next Server" commands. +my_mqtt_params = [ + #tahu.mqtt_params('192.168.1.25', username='admin', password='changeme'), + #tahu.mqtt_params('securehost', certfile='my_cert.pem', keyfile='my_private.key', tls_enabled=True), + tahu.mqtt_params('broker.hivemq.com'), +] + +# Some handlers for NCMD/DCMD messages from Sparkplug +def cmd_context_write_read(tag, context, value): + logger.info('cmd_context_write_read tag={} context={} value={}'.format(tag.name, + context, + value)) + context.write(value) + tag.change_value(context.read(), send_immediate=False) + +def cmd_buzzer_fail(tag, context, value): + logger.info('cmd_buzzer_fail tag={} context={} value={}'.format(tag.name, + context, + value)) + pibrella.buzzer.fail() + tag.change_value(value, send_immediate=False) + +def cmd_buzzer_success(tag, context, value): + logger.info('cmd_buzzer_success tag={} context={} value={}'.format(tag.name, + context, + value)) + pibrella.buzzer.success() + tag.change_value(value, send_immediate=False) + +my_edge_node = tahu.edge.Node(my_mqtt_params, my_group_id, + my_node_name, u32_in_long=True) + +# Find some interesting info about our system to report +uname_args = 'uname -a'.split() +uname_output = subprocess.check_output(uname_args) 
+uname_output.strip() +hardware_info = '' +revision_info = '' +serial_info = '' +with open('/proc/cpuinfo', 'r') as cpuinfo: + for line in cpuinfo: + if 'Hardware' in line: + hardware_info = hardware_info + line + elif 'Revision' in line: + revision_info = revision_info + line + elif 'Serial' in line: + serial_info = serial_info + line +tahu.edge.Metric(my_edge_node, 'Parameters/sw_version', + tahu.DataType.String, value=uname_output) +tahu.edge.Metric(my_edge_node, 'Parameters/hw_version', + tahu.DataType.String, value=hardware_info) +tahu.edge.Metric(my_edge_node, 'Parameters/hw_revision', + tahu.DataType.String, value=revision_info) +tahu.edge.Metric(my_edge_node, 'Parameters/hw_serial', + tahu.DataType.String, value=serial_info) + +# Map all the pibrealla ins and outs to metrics +# Save the metric references on inputs so we can use them in the pibrella event handlers +my_subdevice = tahu.edge.Device(my_edge_node, my_device_name) +in_a_metric = tahu.edge.Metric(my_subdevice, 'Inputs/a', + tahu.DataType.Boolean, + value=pibrella.input.a.read()) +in_b_metric = tahu.edge.Metric(my_subdevice, 'Inputs/b', + tahu.DataType.Boolean, + value=pibrella.input.b.read()) +in_c_metric = tahu.edge.Metric(my_subdevice, 'Inputs/c', + tahu.DataType.Boolean, + value=pibrella.input.c.read()) +in_d_metric = tahu.edge.Metric(my_subdevice, 'Inputs/d', + tahu.DataType.Boolean, + value=pibrella.input.d.read()) +button_metric = tahu.edge.Metric(my_subdevice, 'button', + tahu.DataType.Boolean, + value=pibrella.button.read()) +# Setup cmd_handler and cmd_context on outputs to support CMD messages from host applications +tahu.edge.Metric(my_subdevice, 'Outputs/e', tahu.DataType.Boolean, + value=pibrella.output.e.read(), + cmd_handler=cmd_context_write_read, + cmd_context=pibrella.output.e) +tahu.edge.Metric(my_subdevice, 'Outputs/f', tahu.DataType.Boolean, + value=pibrella.output.f.read(), + cmd_handler=cmd_context_write_read, + cmd_context=pibrella.output.f) +tahu.edge.Metric(my_subdevice, 
'Outputs/g', tahu.DataType.Boolean, + value=pibrella.output.g.read(), + cmd_handler=cmd_context_write_read, + cmd_context=pibrella.output.g) +tahu.edge.Metric(my_subdevice, 'Outputs/h', tahu.DataType.Boolean, + value=pibrella.output.h.read(), + cmd_handler=cmd_context_write_read, + cmd_context=pibrella.output.h) +tahu.edge.Metric(my_subdevice, 'Outputs/LEDs/green', + tahu.DataType.Boolean, + value=pibrella.light.green.read(), + cmd_handler=cmd_context_write_read, + cmd_context=pibrella.light.green) +tahu.edge.Metric(my_subdevice, 'Outputs/LEDs/red', + tahu.DataType.Boolean, + value=pibrella.light.red.read(), + cmd_handler=cmd_context_write_read, + cmd_context=pibrella.light.red) +tahu.edge.Metric(my_subdevice, 'Outputs/LEDs/yellow', + tahu.DataType.Boolean, + value=pibrella.light.yellow.read(), + cmd_handler=cmd_context_write_read, + cmd_context=pibrella.light.yellow) +tahu.edge.Metric(my_subdevice, 'buzzer_fail', tahu.DataType.Boolean, + value=False, cmd_handler=cmd_buzzer_fail) +tahu.edge.Metric(my_subdevice, 'buzzer_success', + tahu.DataType.Boolean, value=False, + cmd_handler=cmd_buzzer_success) + +# Set up the pibrella input event handlers +pibrella.button.changed(lambda pin: button_metric.change_value(pin.read(), + send_immediate=False)) +pibrella.input.a.changed(lambda pin: in_a_metric.change_value(pin.read(), + send_immediate=False)) +pibrella.input.b.changed(lambda pin: in_b_metric.change_value(pin.read(), + send_immediate=False)) +pibrella.input.c.changed(lambda pin: in_c_metric.change_value(pin.read(), + send_immediate=False)) +pibrella.input.d.changed(lambda pin: in_d_metric.change_value(pin.read(), + send_immediate=False)) + +# Now that we've created all of our devices, metrics, and event handlers, +# we can connect the edge node to the broker. +my_edge_node.online() + +while not my_edge_node.is_connected(): + # TODO - Add some sort of timeout feature? + time.sleep(0.1) + pass + +while True: + # Sit and wait for a moment... 
+ time.sleep(0.1) + + # Send any unsent changes + my_subdevice.send_data(changed_only=True) + my_edge_node.send_data(changed_only=True) + diff --git a/python/examples/simple_example.py b/python/examples/simple_example.py new file mode 100755 index 00000000..53765e14 --- /dev/null +++ b/python/examples/simple_example.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 + +############################################################################# +# Copyright (c) 2022 Justin Brzozoski +# +# This program and the accompanying materials are made available under the +# terms of the Eclipse Public License 2.0 which is available at +# http://www.eclipse.org/legal/epl-2.0. +# +# SPDX-License-Identifier: EPL-2.0 +# +# Contributors: +# Justin Brzozoski @ SignalFire Wireless Telemetry +############################################################################# + +# This is just about the simplest SparkplugB edge node possible: +# It has NO subdevices and all custom metrics are directly on the edge node +# It has NO support setup to handle commands from the server on any custom metrics +# It just sends a loop counter and current system time to the server every 5 seconds + +# However, the library handles all this: +# The library sets up and handles well-known metrics like "Rebirth" and "Next Server" for you, including server commands +# The library handles all BIRTH/DEATH messages for you +# The library handles all bdSeq, sequence, and other details for you +# The library tries to stay online and will automatically reconnect as needed + +import tahu +import tahu.edge + +# We use time to sleep in our main loop +import time + +# You can run without a logger, but the edge node is quiet without it. +# Since we're okay with all nodes logging to the console and defining +# their own logging IDs, we only need to setup a basic config here. +# We don't need to hold on to any logger objects or pass them around. 
+import logging +logging.basicConfig(level=logging.INFO) + +# Commonly configured items +# The combination of my_group_id and my_node_name uniquely identify this node to servers +my_group_id = 'Tahu Sample' +my_node_name = 'Simple Node 1' +# This is where you define MQTT connection parameters (hostname, username, password, TLS, etc) +my_mqtt_params = [ tahu.mqtt_params('broker.hivemq.com') ] + +# Here is where we setup one edge node with two custom metrics +my_edge_node = tahu.edge.Node(my_mqtt_params, my_group_id, + my_node_name, + u32_in_long=True) +loop_count_tag = tahu.edge.Metric(my_edge_node, 'loop_count', + datatype=tahu.DataType.UInt32, + value=0) +sys_time_tag = tahu.edge.Metric(my_edge_node, 'sys_time', + datatype=tahu.DataType.DateTime, + value=tahu.timestamp_to_sparkplug()) +# And that's it! + +# Now that we've created all of our devices, metrics, and properties, +# we request the edge node connect to the broker: +# Note: This starts a new thread to handle all connectivity work... +my_edge_node.online() + +print('Press Ctrl-C when you want to quit.') + +# Wait until it is connected +while not my_edge_node.is_connected(): + # TODO - Add some sort of timeout feature? + time.sleep(0.1) + +loop_count = 0 +while True: + # Sit and wait for a moment... + time.sleep(5) + + loop_count = loop_count + 1 + print(f'On loop {loop_count}') + + # Send the latest loop count and current system time + loop_count_tag.change_value(loop_count) + sys_time_tag.change_value(tahu.timestamp_to_sparkplug()) + +# The loop above will never quit, but if you wanted to close down or go offline gracefully, +# just call this function to shut down the connection and the connectivity worker thread. 
+my_edge_node.offline() + diff --git a/python/pyproject.toml b/python/pyproject.toml new file mode 100644 index 00000000..de11f324 --- /dev/null +++ b/python/pyproject.toml @@ -0,0 +1,7 @@ +[build-system] +requires = [ + "setuptools>=42", + "wheel" +] +build-backend = "setuptools.build_meta" + diff --git a/python/setup.cfg b/python/setup.cfg new file mode 100644 index 00000000..966b7400 --- /dev/null +++ b/python/setup.cfg @@ -0,0 +1,26 @@ +[metadata] +name = tahu-jbrzozoski +version = 0.5.13 +author = Justin Brzozoski +author_email = justin.brzozoski@signal-fire.com +description = Eclipse Tahu Sparkplug/MQTT Client +long_description = file: README.md +long_description_content_type = text/markdown +url = https://github.com/eclipse/tahu +project_urls = + Bug Tracker = https://github.com/eclipse/tahu/issues +classifiers = + Programming Language :: Python :: 3 + License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0) + Operating System :: OS Independent + +[options] +package_dir = + = core +packages = find: +install_requires = + paho-mqtt >= 1.6.1 +python_requires = >=3.6 + +[options.packages.find] +where = core From 51f9fc9fc855a89b59a384304e5a4402eba54d8e Mon Sep 17 00:00:00 2001 From: Justin Brzozoski Date: Wed, 15 Jun 2022 00:02:05 -0400 Subject: [PATCH 2/3] python: Regenerate sparkplug_b_pb2.py with libprotoc 3.21.1 --- python/core/tahu/sparkplug_b_pb2.py | 1367 +-------------------------- 1 file changed, 44 insertions(+), 1323 deletions(-) diff --git a/python/core/tahu/sparkplug_b_pb2.py b/python/core/tahu/sparkplug_b_pb2.py index 4ed53ef0..14d4794b 100644 --- a/python/core/tahu/sparkplug_b_pb2.py +++ b/python/core/tahu/sparkplug_b_pb2.py @@ -1,12 +1,10 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: sparkplug_b.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf.internal import enum_type_wrapper +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) @@ -15,1321 +13,44 @@ -DESCRIPTOR = _descriptor.FileDescriptor( - name='sparkplug_b.proto', - package='org.eclipse.tahu.protobuf', - syntax='proto2', - serialized_options=_b('\n\031org.eclipse.tahu.protobufB\017SparkplugBProto'), - serialized_pb=_b('\n\x11sparkplug_b.proto\x12\x19org.eclipse.tahu.protobuf\"\xee\x15\n\x07Payload\x12\x11\n\ttimestamp\x18\x01 \x01(\x04\x12:\n\x07metrics\x18\x02 \x03(\x0b\x32).org.eclipse.tahu.protobuf.Payload.Metric\x12\x0b\n\x03seq\x18\x03 \x01(\x04\x12\x0c\n\x04uuid\x18\x04 \x01(\t\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a\xa6\x04\n\x08Template\x12\x0f\n\x07version\x18\x01 \x01(\t\x12:\n\x07metrics\x18\x02 \x03(\x0b\x32).org.eclipse.tahu.protobuf.Payload.Metric\x12I\n\nparameters\x18\x03 \x03(\x0b\x32\x35.org.eclipse.tahu.protobuf.Payload.Template.Parameter\x12\x14\n\x0ctemplate_ref\x18\x04 \x01(\t\x12\x15\n\ris_definition\x18\x05 \x01(\x08\x1a\xca\x02\n\tParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\r\x12\x13\n\tint_value\x18\x03 \x01(\rH\x00\x12\x14\n\nlong_value\x18\x04 \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x05 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x06 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x07 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x08 \x01(\tH\x00\x12h\n\x0f\x65xtension_value\x18\t 
\x01(\x0b\x32M.org.eclipse.tahu.protobuf.Payload.Template.Parameter.ParameterValueExtensionH\x00\x1a#\n\x17ParameterValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value*\x08\x08\x06\x10\x80\x80\x80\x80\x02\x1a\x97\x04\n\x07\x44\x61taSet\x12\x16\n\x0enum_of_columns\x18\x01 \x01(\x04\x12\x0f\n\x07\x63olumns\x18\x02 \x03(\t\x12\r\n\x05types\x18\x03 \x03(\r\x12<\n\x04rows\x18\x04 \x03(\x0b\x32..org.eclipse.tahu.protobuf.Payload.DataSet.Row\x1a\xaf\x02\n\x0c\x44\x61taSetValue\x12\x13\n\tint_value\x18\x01 \x01(\rH\x00\x12\x14\n\nlong_value\x18\x02 \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x03 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x05 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x06 \x01(\tH\x00\x12h\n\x0f\x65xtension_value\x18\x07 \x01(\x0b\x32M.org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.DataSetValueExtensionH\x00\x1a!\n\x15\x44\x61taSetValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value\x1aZ\n\x03Row\x12I\n\x08\x65lements\x18\x01 \x03(\x0b\x32\x37.org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue*\x08\x08\x02\x10\x80\x80\x80\x80\x02*\x08\x08\x05\x10\x80\x80\x80\x80\x02\x1a\xe9\x03\n\rPropertyValue\x12\x0c\n\x04type\x18\x01 \x01(\r\x12\x0f\n\x07is_null\x18\x02 \x01(\x08\x12\x13\n\tint_value\x18\x03 \x01(\rH\x00\x12\x14\n\nlong_value\x18\x04 \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x05 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x06 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x07 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x08 \x01(\tH\x00\x12K\n\x11propertyset_value\x18\t \x01(\x0b\x32..org.eclipse.tahu.protobuf.Payload.PropertySetH\x00\x12P\n\x12propertysets_value\x18\n \x01(\x0b\x32\x32.org.eclipse.tahu.protobuf.Payload.PropertySetListH\x00\x12\x62\n\x0f\x65xtension_value\x18\x0b 
\x01(\x0b\x32G.org.eclipse.tahu.protobuf.Payload.PropertyValue.PropertyValueExtensionH\x00\x1a\"\n\x16PropertyValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value\x1ag\n\x0bPropertySet\x12\x0c\n\x04keys\x18\x01 \x03(\t\x12@\n\x06values\x18\x02 \x03(\x0b\x32\x30.org.eclipse.tahu.protobuf.Payload.PropertyValue*\x08\x08\x03\x10\x80\x80\x80\x80\x02\x1a`\n\x0fPropertySetList\x12\x43\n\x0bpropertyset\x18\x01 \x03(\x0b\x32..org.eclipse.tahu.protobuf.Payload.PropertySet*\x08\x08\x02\x10\x80\x80\x80\x80\x02\x1a\xa4\x01\n\x08MetaData\x12\x15\n\ris_multi_part\x18\x01 \x01(\x08\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\t\x12\x0c\n\x04size\x18\x03 \x01(\x04\x12\x0b\n\x03seq\x18\x04 \x01(\x04\x12\x11\n\tfile_name\x18\x05 \x01(\t\x12\x11\n\tfile_type\x18\x06 \x01(\t\x12\x0b\n\x03md5\x18\x07 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x08 \x01(\t*\x08\x08\t\x10\x80\x80\x80\x80\x02\x1a\xbf\x05\n\x06Metric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05\x61lias\x18\x02 \x01(\x04\x12\x11\n\ttimestamp\x18\x03 \x01(\x04\x12\x10\n\x08\x64\x61tatype\x18\x04 \x01(\r\x12\x15\n\ris_historical\x18\x05 \x01(\x08\x12\x14\n\x0cis_transient\x18\x06 \x01(\x08\x12\x0f\n\x07is_null\x18\x07 \x01(\x08\x12=\n\x08metadata\x18\x08 \x01(\x0b\x32+.org.eclipse.tahu.protobuf.Payload.MetaData\x12\x42\n\nproperties\x18\t \x01(\x0b\x32..org.eclipse.tahu.protobuf.Payload.PropertySet\x12\x13\n\tint_value\x18\n \x01(\rH\x00\x12\x14\n\nlong_value\x18\x0b \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x0c \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\r \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x0e \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x0f \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x10 \x01(\x0cH\x00\x12\x43\n\rdataset_value\x18\x11 \x01(\x0b\x32*.org.eclipse.tahu.protobuf.Payload.DataSetH\x00\x12\x45\n\x0etemplate_value\x18\x12 \x01(\x0b\x32+.org.eclipse.tahu.protobuf.Payload.TemplateH\x00\x12Y\n\x0f\x65xtension_value\x18\x13 
\x01(\x0b\x32>.org.eclipse.tahu.protobuf.Payload.Metric.MetricValueExtensionH\x00\x1a \n\x14MetricValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value*\x08\x08\x06\x10\x80\x80\x80\x80\x02*\xf2\x03\n\x08\x44\x61taType\x12\x0b\n\x07Unknown\x10\x00\x12\x08\n\x04Int8\x10\x01\x12\t\n\x05Int16\x10\x02\x12\t\n\x05Int32\x10\x03\x12\t\n\x05Int64\x10\x04\x12\t\n\x05UInt8\x10\x05\x12\n\n\x06UInt16\x10\x06\x12\n\n\x06UInt32\x10\x07\x12\n\n\x06UInt64\x10\x08\x12\t\n\x05\x46loat\x10\t\x12\n\n\x06\x44ouble\x10\n\x12\x0b\n\x07\x42oolean\x10\x0b\x12\n\n\x06String\x10\x0c\x12\x0c\n\x08\x44\x61teTime\x10\r\x12\x08\n\x04Text\x10\x0e\x12\x08\n\x04UUID\x10\x0f\x12\x0b\n\x07\x44\x61taSet\x10\x10\x12\t\n\x05\x42ytes\x10\x11\x12\x08\n\x04\x46ile\x10\x12\x12\x0c\n\x08Template\x10\x13\x12\x0f\n\x0bPropertySet\x10\x14\x12\x13\n\x0fPropertySetList\x10\x15\x12\r\n\tInt8Array\x10\x16\x12\x0e\n\nInt16Array\x10\x17\x12\x0e\n\nInt32Array\x10\x18\x12\x0e\n\nInt64Array\x10\x19\x12\x0e\n\nUInt8Array\x10\x1a\x12\x0f\n\x0bUInt16Array\x10\x1b\x12\x0f\n\x0bUInt32Array\x10\x1c\x12\x0f\n\x0bUInt64Array\x10\x1d\x12\x0e\n\nFloatArray\x10\x1e\x12\x0f\n\x0b\x44oubleArray\x10\x1f\x12\x10\n\x0c\x42ooleanArray\x10 \x12\x0f\n\x0bStringArray\x10!\x12\x11\n\rDateTimeArray\x10\"B,\n\x19org.eclipse.tahu.protobufB\x0fSparkplugBProto') -) - -_DATATYPE = _descriptor.EnumDescriptor( - name='DataType', - full_name='org.eclipse.tahu.protobuf.DataType', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='Unknown', index=0, number=0, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='Int8', index=1, number=1, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='Int16', index=2, number=2, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='Int32', index=3, number=3, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='Int64', index=4, number=4, - 
serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='UInt8', index=5, number=5, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='UInt16', index=6, number=6, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='UInt32', index=7, number=7, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='UInt64', index=8, number=8, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='Float', index=9, number=9, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='Double', index=10, number=10, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='Boolean', index=11, number=11, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='String', index=12, number=12, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='DateTime', index=13, number=13, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='Text', index=14, number=14, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='UUID', index=15, number=15, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='DataSet', index=16, number=16, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='Bytes', index=17, number=17, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='File', index=18, number=18, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='Template', index=19, number=19, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='PropertySet', index=20, number=20, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='PropertySetList', index=21, number=21, - serialized_options=None, - type=None), - 
_descriptor.EnumValueDescriptor( - name='Int8Array', index=22, number=22, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='Int16Array', index=23, number=23, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='Int32Array', index=24, number=24, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='Int64Array', index=25, number=25, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='UInt8Array', index=26, number=26, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='UInt16Array', index=27, number=27, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='UInt32Array', index=28, number=28, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='UInt64Array', index=29, number=29, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='FloatArray', index=30, number=30, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='DoubleArray', index=31, number=31, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='BooleanArray', index=32, number=32, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='StringArray', index=33, number=33, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='DateTimeArray', index=34, number=34, - serialized_options=None, - type=None), - ], - containing_type=None, - serialized_options=None, - serialized_start=2850, - serialized_end=3348, -) -_sym_db.RegisterEnumDescriptor(_DATATYPE) - -DataType = enum_type_wrapper.EnumTypeWrapper(_DATATYPE) -Unknown = 0 -Int8 = 1 -Int16 = 2 -Int32 = 3 -Int64 = 4 -UInt8 = 5 -UInt16 = 6 -UInt32 = 7 -UInt64 = 8 -Float = 9 -Double = 10 -Boolean = 11 -String = 12 -DateTime = 13 -Text = 14 -UUID = 15 -DataSet = 16 -Bytes = 17 -File = 18 -Template = 19 
-PropertySet = 20 -PropertySetList = 21 -Int8Array = 22 -Int16Array = 23 -Int32Array = 24 -Int64Array = 25 -UInt8Array = 26 -UInt16Array = 27 -UInt32Array = 28 -UInt64Array = 29 -FloatArray = 30 -DoubleArray = 31 -BooleanArray = 32 -StringArray = 33 -DateTimeArray = 34 - - - -_PAYLOAD_TEMPLATE_PARAMETER_PARAMETERVALUEEXTENSION = _descriptor.Descriptor( - name='ParameterValueExtension', - full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.ParameterValueExtension', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1, 536870912), ], - oneofs=[ - ], - serialized_start=677, - serialized_end=712, -) - -_PAYLOAD_TEMPLATE_PARAMETER = _descriptor.Descriptor( - name='Parameter', - full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='type', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.type', index=1, - number=2, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='int_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.int_value', index=2, - number=3, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, 
enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='long_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.long_value', index=3, - number=4, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='float_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.float_value', index=4, - number=5, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='double_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.double_value', index=5, - number=6, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='boolean_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.boolean_value', index=6, - number=7, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='string_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.string_value', index=7, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - 
serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='extension_value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.extension_value', index=8, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_PAYLOAD_TEMPLATE_PARAMETER_PARAMETERVALUEEXTENSION, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='value', full_name='org.eclipse.tahu.protobuf.Payload.Template.Parameter.value', - index=0, containing_type=None, fields=[]), - ], - serialized_start=391, - serialized_end=721, -) - -_PAYLOAD_TEMPLATE = _descriptor.Descriptor( - name='Template', - full_name='org.eclipse.tahu.protobuf.Payload.Template', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='version', full_name='org.eclipse.tahu.protobuf.Payload.Template.version', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='metrics', full_name='org.eclipse.tahu.protobuf.Payload.Template.metrics', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='parameters', full_name='org.eclipse.tahu.protobuf.Payload.Template.parameters', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, 
default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='template_ref', full_name='org.eclipse.tahu.protobuf.Payload.Template.template_ref', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='is_definition', full_name='org.eclipse.tahu.protobuf.Payload.Template.is_definition', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_PAYLOAD_TEMPLATE_PARAMETER, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(6, 536870912), ], - oneofs=[ - ], - serialized_start=181, - serialized_end=731, -) - -_PAYLOAD_DATASET_DATASETVALUE_DATASETVALUEEXTENSION = _descriptor.Descriptor( - name='DataSetValueExtension', - full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.DataSetValueExtension', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1, 536870912), ], - oneofs=[ - ], - serialized_start=1125, - serialized_end=1158, -) - -_PAYLOAD_DATASET_DATASETVALUE = _descriptor.Descriptor( - name='DataSetValue', - full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='int_value', 
full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.int_value', index=0, - number=1, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='long_value', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.long_value', index=1, - number=2, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='float_value', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.float_value', index=2, - number=3, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='double_value', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.double_value', index=3, - number=4, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='boolean_value', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.boolean_value', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='string_value', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.string_value', index=5, - number=6, 
type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='extension_value', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.extension_value', index=6, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_PAYLOAD_DATASET_DATASETVALUE_DATASETVALUEEXTENSION, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='value', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.value', - index=0, containing_type=None, fields=[]), - ], - serialized_start=864, - serialized_end=1167, -) - -_PAYLOAD_DATASET_ROW = _descriptor.Descriptor( - name='Row', - full_name='org.eclipse.tahu.protobuf.Payload.DataSet.Row', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='elements', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.Row.elements', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(2, 536870912), ], - oneofs=[ - ], - serialized_start=1169, - serialized_end=1259, -) - -_PAYLOAD_DATASET = _descriptor.Descriptor( - name='DataSet', - full_name='org.eclipse.tahu.protobuf.Payload.DataSet', - 
filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='num_of_columns', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.num_of_columns', index=0, - number=1, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='columns', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.columns', index=1, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='types', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.types', index=2, - number=3, type=13, cpp_type=3, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='rows', full_name='org.eclipse.tahu.protobuf.Payload.DataSet.rows', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_PAYLOAD_DATASET_DATASETVALUE, _PAYLOAD_DATASET_ROW, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(5, 536870912), ], - oneofs=[ - ], - serialized_start=734, - serialized_end=1269, -) - -_PAYLOAD_PROPERTYVALUE_PROPERTYVALUEEXTENSION = _descriptor.Descriptor( - name='PropertyValueExtension', - full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.PropertyValueExtension', - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1, 536870912), ], - oneofs=[ - ], - serialized_start=1718, - serialized_end=1752, -) - -_PAYLOAD_PROPERTYVALUE = _descriptor.Descriptor( - name='PropertyValue', - full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='type', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.type', index=0, - number=1, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='is_null', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.is_null', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='int_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.int_value', index=2, - number=3, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='long_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.long_value', index=3, - number=4, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - 
name='float_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.float_value', index=4, - number=5, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='double_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.double_value', index=5, - number=6, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='boolean_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.boolean_value', index=6, - number=7, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='string_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.string_value', index=7, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='propertyset_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.propertyset_value', index=8, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='propertysets_value', 
full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.propertysets_value', index=9, - number=10, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='extension_value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.extension_value', index=10, - number=11, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_PAYLOAD_PROPERTYVALUE_PROPERTYVALUEEXTENSION, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='value', full_name='org.eclipse.tahu.protobuf.Payload.PropertyValue.value', - index=0, containing_type=None, fields=[]), - ], - serialized_start=1272, - serialized_end=1761, -) - -_PAYLOAD_PROPERTYSET = _descriptor.Descriptor( - name='PropertySet', - full_name='org.eclipse.tahu.protobuf.Payload.PropertySet', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='keys', full_name='org.eclipse.tahu.protobuf.Payload.PropertySet.keys', index=0, - number=1, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='values', full_name='org.eclipse.tahu.protobuf.Payload.PropertySet.values', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, 
extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(3, 536870912), ], - oneofs=[ - ], - serialized_start=1763, - serialized_end=1866, -) - -_PAYLOAD_PROPERTYSETLIST = _descriptor.Descriptor( - name='PropertySetList', - full_name='org.eclipse.tahu.protobuf.Payload.PropertySetList', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='propertyset', full_name='org.eclipse.tahu.protobuf.Payload.PropertySetList.propertyset', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(2, 536870912), ], - oneofs=[ - ], - serialized_start=1868, - serialized_end=1964, -) - -_PAYLOAD_METADATA = _descriptor.Descriptor( - name='MetaData', - full_name='org.eclipse.tahu.protobuf.Payload.MetaData', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='is_multi_part', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.is_multi_part', index=0, - number=1, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='content_type', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.content_type', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='size', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.size', index=2, - number=3, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='seq', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.seq', index=3, - number=4, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='file_name', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.file_name', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='file_type', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.file_type', index=5, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='md5', full_name='org.eclipse.tahu.protobuf.Payload.MetaData.md5', index=6, - number=7, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='description', 
full_name='org.eclipse.tahu.protobuf.Payload.MetaData.description', index=7, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(9, 536870912), ], - oneofs=[ - ], - serialized_start=1967, - serialized_end=2131, -) - -_PAYLOAD_METRIC_METRICVALUEEXTENSION = _descriptor.Descriptor( - name='MetricValueExtension', - full_name='org.eclipse.tahu.protobuf.Payload.Metric.MetricValueExtension', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1, 536870912), ], - oneofs=[ - ], - serialized_start=2796, - serialized_end=2828, -) - -_PAYLOAD_METRIC = _descriptor.Descriptor( - name='Metric', - full_name='org.eclipse.tahu.protobuf.Payload.Metric', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='org.eclipse.tahu.protobuf.Payload.Metric.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='alias', full_name='org.eclipse.tahu.protobuf.Payload.Metric.alias', index=1, - number=2, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - 
name='timestamp', full_name='org.eclipse.tahu.protobuf.Payload.Metric.timestamp', index=2, - number=3, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='datatype', full_name='org.eclipse.tahu.protobuf.Payload.Metric.datatype', index=3, - number=4, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='is_historical', full_name='org.eclipse.tahu.protobuf.Payload.Metric.is_historical', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='is_transient', full_name='org.eclipse.tahu.protobuf.Payload.Metric.is_transient', index=5, - number=6, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='is_null', full_name='org.eclipse.tahu.protobuf.Payload.Metric.is_null', index=6, - number=7, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='metadata', full_name='org.eclipse.tahu.protobuf.Payload.Metric.metadata', index=7, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - 
message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='properties', full_name='org.eclipse.tahu.protobuf.Payload.Metric.properties', index=8, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='int_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.int_value', index=9, - number=10, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='long_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.long_value', index=10, - number=11, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='float_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.float_value', index=11, - number=12, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='double_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.double_value', index=12, - number=13, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - 
_descriptor.FieldDescriptor( - name='boolean_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.boolean_value', index=13, - number=14, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='string_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.string_value', index=14, - number=15, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bytes_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.bytes_value', index=15, - number=16, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='dataset_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.dataset_value', index=16, - number=17, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='template_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.template_value', index=17, - number=18, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='extension_value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.extension_value', 
index=18, - number=19, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_PAYLOAD_METRIC_METRICVALUEEXTENSION, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='value', full_name='org.eclipse.tahu.protobuf.Payload.Metric.value', - index=0, containing_type=None, fields=[]), - ], - serialized_start=2134, - serialized_end=2837, -) - -_PAYLOAD = _descriptor.Descriptor( - name='Payload', - full_name='org.eclipse.tahu.protobuf.Payload', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='timestamp', full_name='org.eclipse.tahu.protobuf.Payload.timestamp', index=0, - number=1, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='metrics', full_name='org.eclipse.tahu.protobuf.Payload.metrics', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='seq', full_name='org.eclipse.tahu.protobuf.Payload.seq', index=2, - number=3, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='uuid', full_name='org.eclipse.tahu.protobuf.Payload.uuid', index=3, - number=4, 
type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='body', full_name='org.eclipse.tahu.protobuf.Payload.body', index=4, - number=5, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_PAYLOAD_TEMPLATE, _PAYLOAD_DATASET, _PAYLOAD_PROPERTYVALUE, _PAYLOAD_PROPERTYSET, _PAYLOAD_PROPERTYSETLIST, _PAYLOAD_METADATA, _PAYLOAD_METRIC, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(6, 536870912), ], - oneofs=[ - ], - serialized_start=49, - serialized_end=2847, -) - -_PAYLOAD_TEMPLATE_PARAMETER_PARAMETERVALUEEXTENSION.containing_type = _PAYLOAD_TEMPLATE_PARAMETER -_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['extension_value'].message_type = _PAYLOAD_TEMPLATE_PARAMETER_PARAMETERVALUEEXTENSION -_PAYLOAD_TEMPLATE_PARAMETER.containing_type = _PAYLOAD_TEMPLATE -_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'].fields.append( - _PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['int_value']) -_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['int_value'].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'] -_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'].fields.append( - _PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['long_value']) -_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['long_value'].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'] -_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'].fields.append( - _PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['float_value']) -_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['float_value'].containing_oneof = 
_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'] -_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'].fields.append( - _PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['double_value']) -_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['double_value'].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'] -_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'].fields.append( - _PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['boolean_value']) -_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['boolean_value'].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'] -_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'].fields.append( - _PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['string_value']) -_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['string_value'].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'] -_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'].fields.append( - _PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['extension_value']) -_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name['extension_value'].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name['value'] -_PAYLOAD_TEMPLATE.fields_by_name['metrics'].message_type = _PAYLOAD_METRIC -_PAYLOAD_TEMPLATE.fields_by_name['parameters'].message_type = _PAYLOAD_TEMPLATE_PARAMETER -_PAYLOAD_TEMPLATE.containing_type = _PAYLOAD -_PAYLOAD_DATASET_DATASETVALUE_DATASETVALUEEXTENSION.containing_type = _PAYLOAD_DATASET_DATASETVALUE -_PAYLOAD_DATASET_DATASETVALUE.fields_by_name['extension_value'].message_type = _PAYLOAD_DATASET_DATASETVALUE_DATASETVALUEEXTENSION -_PAYLOAD_DATASET_DATASETVALUE.containing_type = _PAYLOAD_DATASET -_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_DATASET_DATASETVALUE.fields_by_name['int_value']) -_PAYLOAD_DATASET_DATASETVALUE.fields_by_name['int_value'].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'] -_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'].fields.append( - 
_PAYLOAD_DATASET_DATASETVALUE.fields_by_name['long_value']) -_PAYLOAD_DATASET_DATASETVALUE.fields_by_name['long_value'].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'] -_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_DATASET_DATASETVALUE.fields_by_name['float_value']) -_PAYLOAD_DATASET_DATASETVALUE.fields_by_name['float_value'].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'] -_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_DATASET_DATASETVALUE.fields_by_name['double_value']) -_PAYLOAD_DATASET_DATASETVALUE.fields_by_name['double_value'].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'] -_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_DATASET_DATASETVALUE.fields_by_name['boolean_value']) -_PAYLOAD_DATASET_DATASETVALUE.fields_by_name['boolean_value'].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'] -_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_DATASET_DATASETVALUE.fields_by_name['string_value']) -_PAYLOAD_DATASET_DATASETVALUE.fields_by_name['string_value'].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'] -_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_DATASET_DATASETVALUE.fields_by_name['extension_value']) -_PAYLOAD_DATASET_DATASETVALUE.fields_by_name['extension_value'].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name['value'] -_PAYLOAD_DATASET_ROW.fields_by_name['elements'].message_type = _PAYLOAD_DATASET_DATASETVALUE -_PAYLOAD_DATASET_ROW.containing_type = _PAYLOAD_DATASET -_PAYLOAD_DATASET.fields_by_name['rows'].message_type = _PAYLOAD_DATASET_ROW -_PAYLOAD_DATASET.containing_type = _PAYLOAD -_PAYLOAD_PROPERTYVALUE_PROPERTYVALUEEXTENSION.containing_type = _PAYLOAD_PROPERTYVALUE -_PAYLOAD_PROPERTYVALUE.fields_by_name['propertyset_value'].message_type = 
_PAYLOAD_PROPERTYSET -_PAYLOAD_PROPERTYVALUE.fields_by_name['propertysets_value'].message_type = _PAYLOAD_PROPERTYSETLIST -_PAYLOAD_PROPERTYVALUE.fields_by_name['extension_value'].message_type = _PAYLOAD_PROPERTYVALUE_PROPERTYVALUEEXTENSION -_PAYLOAD_PROPERTYVALUE.containing_type = _PAYLOAD -_PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_PROPERTYVALUE.fields_by_name['int_value']) -_PAYLOAD_PROPERTYVALUE.fields_by_name['int_value'].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'] -_PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_PROPERTYVALUE.fields_by_name['long_value']) -_PAYLOAD_PROPERTYVALUE.fields_by_name['long_value'].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'] -_PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_PROPERTYVALUE.fields_by_name['float_value']) -_PAYLOAD_PROPERTYVALUE.fields_by_name['float_value'].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'] -_PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_PROPERTYVALUE.fields_by_name['double_value']) -_PAYLOAD_PROPERTYVALUE.fields_by_name['double_value'].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'] -_PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_PROPERTYVALUE.fields_by_name['boolean_value']) -_PAYLOAD_PROPERTYVALUE.fields_by_name['boolean_value'].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'] -_PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_PROPERTYVALUE.fields_by_name['string_value']) -_PAYLOAD_PROPERTYVALUE.fields_by_name['string_value'].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'] -_PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_PROPERTYVALUE.fields_by_name['propertyset_value']) -_PAYLOAD_PROPERTYVALUE.fields_by_name['propertyset_value'].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'] 
-_PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_PROPERTYVALUE.fields_by_name['propertysets_value']) -_PAYLOAD_PROPERTYVALUE.fields_by_name['propertysets_value'].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'] -_PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'].fields.append( - _PAYLOAD_PROPERTYVALUE.fields_by_name['extension_value']) -_PAYLOAD_PROPERTYVALUE.fields_by_name['extension_value'].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name['value'] -_PAYLOAD_PROPERTYSET.fields_by_name['values'].message_type = _PAYLOAD_PROPERTYVALUE -_PAYLOAD_PROPERTYSET.containing_type = _PAYLOAD -_PAYLOAD_PROPERTYSETLIST.fields_by_name['propertyset'].message_type = _PAYLOAD_PROPERTYSET -_PAYLOAD_PROPERTYSETLIST.containing_type = _PAYLOAD -_PAYLOAD_METADATA.containing_type = _PAYLOAD -_PAYLOAD_METRIC_METRICVALUEEXTENSION.containing_type = _PAYLOAD_METRIC -_PAYLOAD_METRIC.fields_by_name['metadata'].message_type = _PAYLOAD_METADATA -_PAYLOAD_METRIC.fields_by_name['properties'].message_type = _PAYLOAD_PROPERTYSET -_PAYLOAD_METRIC.fields_by_name['dataset_value'].message_type = _PAYLOAD_DATASET -_PAYLOAD_METRIC.fields_by_name['template_value'].message_type = _PAYLOAD_TEMPLATE -_PAYLOAD_METRIC.fields_by_name['extension_value'].message_type = _PAYLOAD_METRIC_METRICVALUEEXTENSION -_PAYLOAD_METRIC.containing_type = _PAYLOAD -_PAYLOAD_METRIC.oneofs_by_name['value'].fields.append( - _PAYLOAD_METRIC.fields_by_name['int_value']) -_PAYLOAD_METRIC.fields_by_name['int_value'].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name['value'] -_PAYLOAD_METRIC.oneofs_by_name['value'].fields.append( - _PAYLOAD_METRIC.fields_by_name['long_value']) -_PAYLOAD_METRIC.fields_by_name['long_value'].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name['value'] -_PAYLOAD_METRIC.oneofs_by_name['value'].fields.append( - _PAYLOAD_METRIC.fields_by_name['float_value']) -_PAYLOAD_METRIC.fields_by_name['float_value'].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name['value'] 
-_PAYLOAD_METRIC.oneofs_by_name['value'].fields.append( - _PAYLOAD_METRIC.fields_by_name['double_value']) -_PAYLOAD_METRIC.fields_by_name['double_value'].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name['value'] -_PAYLOAD_METRIC.oneofs_by_name['value'].fields.append( - _PAYLOAD_METRIC.fields_by_name['boolean_value']) -_PAYLOAD_METRIC.fields_by_name['boolean_value'].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name['value'] -_PAYLOAD_METRIC.oneofs_by_name['value'].fields.append( - _PAYLOAD_METRIC.fields_by_name['string_value']) -_PAYLOAD_METRIC.fields_by_name['string_value'].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name['value'] -_PAYLOAD_METRIC.oneofs_by_name['value'].fields.append( - _PAYLOAD_METRIC.fields_by_name['bytes_value']) -_PAYLOAD_METRIC.fields_by_name['bytes_value'].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name['value'] -_PAYLOAD_METRIC.oneofs_by_name['value'].fields.append( - _PAYLOAD_METRIC.fields_by_name['dataset_value']) -_PAYLOAD_METRIC.fields_by_name['dataset_value'].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name['value'] -_PAYLOAD_METRIC.oneofs_by_name['value'].fields.append( - _PAYLOAD_METRIC.fields_by_name['template_value']) -_PAYLOAD_METRIC.fields_by_name['template_value'].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name['value'] -_PAYLOAD_METRIC.oneofs_by_name['value'].fields.append( - _PAYLOAD_METRIC.fields_by_name['extension_value']) -_PAYLOAD_METRIC.fields_by_name['extension_value'].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name['value'] -_PAYLOAD.fields_by_name['metrics'].message_type = _PAYLOAD_METRIC -DESCRIPTOR.message_types_by_name['Payload'] = _PAYLOAD -DESCRIPTOR.enum_types_by_name['DataType'] = _DATATYPE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Payload = _reflection.GeneratedProtocolMessageType('Payload', (_message.Message,), dict( - - Template = _reflection.GeneratedProtocolMessageType('Template', (_message.Message,), dict( - - Parameter = _reflection.GeneratedProtocolMessageType('Parameter', 
(_message.Message,), dict( - - ParameterValueExtension = _reflection.GeneratedProtocolMessageType('ParameterValueExtension', (_message.Message,), dict( - DESCRIPTOR = _PAYLOAD_TEMPLATE_PARAMETER_PARAMETERVALUEEXTENSION, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.Template.Parameter.ParameterValueExtension) - )) - , - DESCRIPTOR = _PAYLOAD_TEMPLATE_PARAMETER, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.Template.Parameter) - )) - , - DESCRIPTOR = _PAYLOAD_TEMPLATE, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.Template) - )) - , - - DataSet = _reflection.GeneratedProtocolMessageType('DataSet', (_message.Message,), dict( - - DataSetValue = _reflection.GeneratedProtocolMessageType('DataSetValue', (_message.Message,), dict( - - DataSetValueExtension = _reflection.GeneratedProtocolMessageType('DataSetValueExtension', (_message.Message,), dict( - DESCRIPTOR = _PAYLOAD_DATASET_DATASETVALUE_DATASETVALUEEXTENSION, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.DataSetValueExtension) - )) - , - DESCRIPTOR = _PAYLOAD_DATASET_DATASETVALUE, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue) - )) - , - - Row = _reflection.GeneratedProtocolMessageType('Row', (_message.Message,), dict( - DESCRIPTOR = _PAYLOAD_DATASET_ROW, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.DataSet.Row) - )) - , - DESCRIPTOR = _PAYLOAD_DATASET, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.DataSet) - )) - , - - PropertyValue = _reflection.GeneratedProtocolMessageType('PropertyValue', (_message.Message,), dict( - - PropertyValueExtension = 
_reflection.GeneratedProtocolMessageType('PropertyValueExtension', (_message.Message,), dict( - DESCRIPTOR = _PAYLOAD_PROPERTYVALUE_PROPERTYVALUEEXTENSION, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.PropertyValue.PropertyValueExtension) - )) - , - DESCRIPTOR = _PAYLOAD_PROPERTYVALUE, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.PropertyValue) - )) - , - - PropertySet = _reflection.GeneratedProtocolMessageType('PropertySet', (_message.Message,), dict( - DESCRIPTOR = _PAYLOAD_PROPERTYSET, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.PropertySet) - )) - , - - PropertySetList = _reflection.GeneratedProtocolMessageType('PropertySetList', (_message.Message,), dict( - DESCRIPTOR = _PAYLOAD_PROPERTYSETLIST, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.PropertySetList) - )) - , - - MetaData = _reflection.GeneratedProtocolMessageType('MetaData', (_message.Message,), dict( - DESCRIPTOR = _PAYLOAD_METADATA, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.MetaData) - )) - , - - Metric = _reflection.GeneratedProtocolMessageType('Metric', (_message.Message,), dict( - - MetricValueExtension = _reflection.GeneratedProtocolMessageType('MetricValueExtension', (_message.Message,), dict( - DESCRIPTOR = _PAYLOAD_METRIC_METRICVALUEEXTENSION, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.Metric.MetricValueExtension) - )) - , - DESCRIPTOR = _PAYLOAD_METRIC, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload.Metric) - )) - , - DESCRIPTOR = _PAYLOAD, - __module__ = 'sparkplug_b_pb2' - # @@protoc_insertion_point(class_scope:org.eclipse.tahu.protobuf.Payload) - )) 
-_sym_db.RegisterMessage(Payload) -_sym_db.RegisterMessage(Payload.Template) -_sym_db.RegisterMessage(Payload.Template.Parameter) -_sym_db.RegisterMessage(Payload.Template.Parameter.ParameterValueExtension) -_sym_db.RegisterMessage(Payload.DataSet) -_sym_db.RegisterMessage(Payload.DataSet.DataSetValue) -_sym_db.RegisterMessage(Payload.DataSet.DataSetValue.DataSetValueExtension) -_sym_db.RegisterMessage(Payload.DataSet.Row) -_sym_db.RegisterMessage(Payload.PropertyValue) -_sym_db.RegisterMessage(Payload.PropertyValue.PropertyValueExtension) -_sym_db.RegisterMessage(Payload.PropertySet) -_sym_db.RegisterMessage(Payload.PropertySetList) -_sym_db.RegisterMessage(Payload.MetaData) -_sym_db.RegisterMessage(Payload.Metric) -_sym_db.RegisterMessage(Payload.Metric.MetricValueExtension) - - -DESCRIPTOR._options = None +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11sparkplug_b.proto\x12\x19org.eclipse.tahu.protobuf\"\xee\x15\n\x07Payload\x12\x11\n\ttimestamp\x18\x01 \x01(\x04\x12:\n\x07metrics\x18\x02 \x03(\x0b\x32).org.eclipse.tahu.protobuf.Payload.Metric\x12\x0b\n\x03seq\x18\x03 \x01(\x04\x12\x0c\n\x04uuid\x18\x04 \x01(\t\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a\xa6\x04\n\x08Template\x12\x0f\n\x07version\x18\x01 \x01(\t\x12:\n\x07metrics\x18\x02 \x03(\x0b\x32).org.eclipse.tahu.protobuf.Payload.Metric\x12I\n\nparameters\x18\x03 \x03(\x0b\x32\x35.org.eclipse.tahu.protobuf.Payload.Template.Parameter\x12\x14\n\x0ctemplate_ref\x18\x04 \x01(\t\x12\x15\n\ris_definition\x18\x05 \x01(\x08\x1a\xca\x02\n\tParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\r\x12\x13\n\tint_value\x18\x03 \x01(\rH\x00\x12\x14\n\nlong_value\x18\x04 \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x05 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x06 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x07 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x08 \x01(\tH\x00\x12h\n\x0f\x65xtension_value\x18\t 
\x01(\x0b\x32M.org.eclipse.tahu.protobuf.Payload.Template.Parameter.ParameterValueExtensionH\x00\x1a#\n\x17ParameterValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value*\x08\x08\x06\x10\x80\x80\x80\x80\x02\x1a\x97\x04\n\x07\x44\x61taSet\x12\x16\n\x0enum_of_columns\x18\x01 \x01(\x04\x12\x0f\n\x07\x63olumns\x18\x02 \x03(\t\x12\r\n\x05types\x18\x03 \x03(\r\x12<\n\x04rows\x18\x04 \x03(\x0b\x32..org.eclipse.tahu.protobuf.Payload.DataSet.Row\x1a\xaf\x02\n\x0c\x44\x61taSetValue\x12\x13\n\tint_value\x18\x01 \x01(\rH\x00\x12\x14\n\nlong_value\x18\x02 \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x03 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x05 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x06 \x01(\tH\x00\x12h\n\x0f\x65xtension_value\x18\x07 \x01(\x0b\x32M.org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue.DataSetValueExtensionH\x00\x1a!\n\x15\x44\x61taSetValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value\x1aZ\n\x03Row\x12I\n\x08\x65lements\x18\x01 \x03(\x0b\x32\x37.org.eclipse.tahu.protobuf.Payload.DataSet.DataSetValue*\x08\x08\x02\x10\x80\x80\x80\x80\x02*\x08\x08\x05\x10\x80\x80\x80\x80\x02\x1a\xe9\x03\n\rPropertyValue\x12\x0c\n\x04type\x18\x01 \x01(\r\x12\x0f\n\x07is_null\x18\x02 \x01(\x08\x12\x13\n\tint_value\x18\x03 \x01(\rH\x00\x12\x14\n\nlong_value\x18\x04 \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x05 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x06 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x07 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x08 \x01(\tH\x00\x12K\n\x11propertyset_value\x18\t \x01(\x0b\x32..org.eclipse.tahu.protobuf.Payload.PropertySetH\x00\x12P\n\x12propertysets_value\x18\n \x01(\x0b\x32\x32.org.eclipse.tahu.protobuf.Payload.PropertySetListH\x00\x12\x62\n\x0f\x65xtension_value\x18\x0b 
\x01(\x0b\x32G.org.eclipse.tahu.protobuf.Payload.PropertyValue.PropertyValueExtensionH\x00\x1a\"\n\x16PropertyValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value\x1ag\n\x0bPropertySet\x12\x0c\n\x04keys\x18\x01 \x03(\t\x12@\n\x06values\x18\x02 \x03(\x0b\x32\x30.org.eclipse.tahu.protobuf.Payload.PropertyValue*\x08\x08\x03\x10\x80\x80\x80\x80\x02\x1a`\n\x0fPropertySetList\x12\x43\n\x0bpropertyset\x18\x01 \x03(\x0b\x32..org.eclipse.tahu.protobuf.Payload.PropertySet*\x08\x08\x02\x10\x80\x80\x80\x80\x02\x1a\xa4\x01\n\x08MetaData\x12\x15\n\ris_multi_part\x18\x01 \x01(\x08\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\t\x12\x0c\n\x04size\x18\x03 \x01(\x04\x12\x0b\n\x03seq\x18\x04 \x01(\x04\x12\x11\n\tfile_name\x18\x05 \x01(\t\x12\x11\n\tfile_type\x18\x06 \x01(\t\x12\x0b\n\x03md5\x18\x07 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x08 \x01(\t*\x08\x08\t\x10\x80\x80\x80\x80\x02\x1a\xbf\x05\n\x06Metric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05\x61lias\x18\x02 \x01(\x04\x12\x11\n\ttimestamp\x18\x03 \x01(\x04\x12\x10\n\x08\x64\x61tatype\x18\x04 \x01(\r\x12\x15\n\ris_historical\x18\x05 \x01(\x08\x12\x14\n\x0cis_transient\x18\x06 \x01(\x08\x12\x0f\n\x07is_null\x18\x07 \x01(\x08\x12=\n\x08metadata\x18\x08 \x01(\x0b\x32+.org.eclipse.tahu.protobuf.Payload.MetaData\x12\x42\n\nproperties\x18\t \x01(\x0b\x32..org.eclipse.tahu.protobuf.Payload.PropertySet\x12\x13\n\tint_value\x18\n \x01(\rH\x00\x12\x14\n\nlong_value\x18\x0b \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x0c \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\r \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x0e \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x0f \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x10 \x01(\x0cH\x00\x12\x43\n\rdataset_value\x18\x11 \x01(\x0b\x32*.org.eclipse.tahu.protobuf.Payload.DataSetH\x00\x12\x45\n\x0etemplate_value\x18\x12 \x01(\x0b\x32+.org.eclipse.tahu.protobuf.Payload.TemplateH\x00\x12Y\n\x0f\x65xtension_value\x18\x13 
\x01(\x0b\x32>.org.eclipse.tahu.protobuf.Payload.Metric.MetricValueExtensionH\x00\x1a \n\x14MetricValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\x42\x07\n\x05value*\x08\x08\x06\x10\x80\x80\x80\x80\x02*\xf2\x03\n\x08\x44\x61taType\x12\x0b\n\x07Unknown\x10\x00\x12\x08\n\x04Int8\x10\x01\x12\t\n\x05Int16\x10\x02\x12\t\n\x05Int32\x10\x03\x12\t\n\x05Int64\x10\x04\x12\t\n\x05UInt8\x10\x05\x12\n\n\x06UInt16\x10\x06\x12\n\n\x06UInt32\x10\x07\x12\n\n\x06UInt64\x10\x08\x12\t\n\x05\x46loat\x10\t\x12\n\n\x06\x44ouble\x10\n\x12\x0b\n\x07\x42oolean\x10\x0b\x12\n\n\x06String\x10\x0c\x12\x0c\n\x08\x44\x61teTime\x10\r\x12\x08\n\x04Text\x10\x0e\x12\x08\n\x04UUID\x10\x0f\x12\x0b\n\x07\x44\x61taSet\x10\x10\x12\t\n\x05\x42ytes\x10\x11\x12\x08\n\x04\x46ile\x10\x12\x12\x0c\n\x08Template\x10\x13\x12\x0f\n\x0bPropertySet\x10\x14\x12\x13\n\x0fPropertySetList\x10\x15\x12\r\n\tInt8Array\x10\x16\x12\x0e\n\nInt16Array\x10\x17\x12\x0e\n\nInt32Array\x10\x18\x12\x0e\n\nInt64Array\x10\x19\x12\x0e\n\nUInt8Array\x10\x1a\x12\x0f\n\x0bUInt16Array\x10\x1b\x12\x0f\n\x0bUInt32Array\x10\x1c\x12\x0f\n\x0bUInt64Array\x10\x1d\x12\x0e\n\nFloatArray\x10\x1e\x12\x0f\n\x0b\x44oubleArray\x10\x1f\x12\x10\n\x0c\x42ooleanArray\x10 \x12\x0f\n\x0bStringArray\x10!\x12\x11\n\rDateTimeArray\x10\"B,\n\x19org.eclipse.tahu.protobufB\x0fSparkplugBProto') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'sparkplug_b_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\031org.eclipse.tahu.protobufB\017SparkplugBProto' + _DATATYPE._serialized_start=2850 + _DATATYPE._serialized_end=3348 + _PAYLOAD._serialized_start=49 + _PAYLOAD._serialized_end=2847 + _PAYLOAD_TEMPLATE._serialized_start=181 + _PAYLOAD_TEMPLATE._serialized_end=731 + _PAYLOAD_TEMPLATE_PARAMETER._serialized_start=391 + _PAYLOAD_TEMPLATE_PARAMETER._serialized_end=721 + 
_PAYLOAD_TEMPLATE_PARAMETER_PARAMETERVALUEEXTENSION._serialized_start=677 + _PAYLOAD_TEMPLATE_PARAMETER_PARAMETERVALUEEXTENSION._serialized_end=712 + _PAYLOAD_DATASET._serialized_start=734 + _PAYLOAD_DATASET._serialized_end=1269 + _PAYLOAD_DATASET_DATASETVALUE._serialized_start=864 + _PAYLOAD_DATASET_DATASETVALUE._serialized_end=1167 + _PAYLOAD_DATASET_DATASETVALUE_DATASETVALUEEXTENSION._serialized_start=1125 + _PAYLOAD_DATASET_DATASETVALUE_DATASETVALUEEXTENSION._serialized_end=1158 + _PAYLOAD_DATASET_ROW._serialized_start=1169 + _PAYLOAD_DATASET_ROW._serialized_end=1259 + _PAYLOAD_PROPERTYVALUE._serialized_start=1272 + _PAYLOAD_PROPERTYVALUE._serialized_end=1761 + _PAYLOAD_PROPERTYVALUE_PROPERTYVALUEEXTENSION._serialized_start=1718 + _PAYLOAD_PROPERTYVALUE_PROPERTYVALUEEXTENSION._serialized_end=1752 + _PAYLOAD_PROPERTYSET._serialized_start=1763 + _PAYLOAD_PROPERTYSET._serialized_end=1866 + _PAYLOAD_PROPERTYSETLIST._serialized_start=1868 + _PAYLOAD_PROPERTYSETLIST._serialized_end=1964 + _PAYLOAD_METADATA._serialized_start=1967 + _PAYLOAD_METADATA._serialized_end=2131 + _PAYLOAD_METRIC._serialized_start=2134 + _PAYLOAD_METRIC._serialized_end=2837 + _PAYLOAD_METRIC_METRICVALUEEXTENSION._serialized_start=2796 + _PAYLOAD_METRIC_METRICVALUEEXTENSION._serialized_end=2828 # @@protoc_insertion_point(module_scope) From 5d9866d9dedff3fc10987d588e862fe2d6d1d70e Mon Sep 17 00:00:00 2001 From: Justin Brzozoski Date: Wed, 14 Dec 2022 10:55:04 -0500 Subject: [PATCH 3/3] STASH --- python/core/tahu/__init__.py | 132 ++++--- python/core/tahu/edge.py | 75 ++-- python/core/tahu/sparkplug_b_pb2.pyi | 521 +++++++++++++++++++++++++++ 3 files changed, 631 insertions(+), 97 deletions(-) create mode 100644 python/core/tahu/sparkplug_b_pb2.pyi diff --git a/python/core/tahu/__init__.py b/python/core/tahu/__init__.py index a9088cfe..103acda9 100644 --- a/python/core/tahu/__init__.py +++ b/python/core/tahu/__init__.py @@ -19,6 +19,7 @@ import time import enum from . 
import sparkplug_b_pb2 +from typing import * class SparkplugDecodeError(ValueError): """Exception type for all errors related to decoding SparkplugB payloads""" @@ -51,8 +52,8 @@ class DataType(enum.IntEnum): PropertySetList = sparkplug_b_pb2.PropertySetList -def _get_type_from_datatype(datatype): - """Return the best Python type to handle a SparkplugB DataType if one exists, None otherwise""" +def _get_type_from_datatype(datatype: DataType) -> Type: + """Return the best Python type to handle a SparkplugB DataType if one exists, raises ValueError otherwise""" # TODO - Figure out the best way to handle the complex types in this list. # For now, they are commented out to indicate there is no native Python type. PYTHON_TYPE_PER_DATATYPE = { @@ -79,10 +80,12 @@ def _get_type_from_datatype(datatype): #DataType.PropertySet : lambda x : x, #DataType.PropertySetList : lambda x : x, } - return PYTHON_TYPE_PER_DATATYPE.get(datatype, None) + if datatype not in PYTHON_TYPE_PER_DATATYPE: + raise ValueError(f'DataType {datatype} not fully supported') + return PYTHON_TYPE_PER_DATATYPE[datatype] -def _get_datatype_from_type(pytype): - """Return the best SparkplugB DataType type to handle a Python type if one exists, None otherwise""" +def _get_datatype_from_type(pytype: Type) -> DataType: + """Return the best SparkplugB DataType type to handle a Python type if one exists, raises ValueError otherwise""" DATATYPE_PER_PYTHON_TYPE = { int: DataType.Int64, float: DataType.Double, @@ -90,9 +93,11 @@ def _get_datatype_from_type(pytype): str: DataType.String, bytes: DataType.Bytes, } - return DATATYPE_PER_PYTHON_TYPE.get(pytype, None) + if pytype not in DATATYPE_PER_PYTHON_TYPE: + raise ValueError(f'No good Sparkplug type for Python type {pytype}') + return DATATYPE_PER_PYTHON_TYPE[pytype] -def _get_usable_value_fields_for_datatype(datatype): +def _get_usable_value_fields_for_datatype(datatype: DataType) -> Set[str]: """Return a set of "oneof value" field names that we are willing to read 
a value from for a given SparkplugB DataType""" # NOTE: This is not normative by spec, but is useful when talking to an imperfect # implementation on the other side. It lists for each expected datatype @@ -122,13 +127,13 @@ def _get_usable_value_fields_for_datatype(datatype): } return CONVERTIBLE_VALUE_FIELD_PER_DATATYPE.get(datatype, set()) -def _is_int_datatype(datatype): +def _is_int_datatype(datatype: DataType) -> bool: """Return whether SparkplugB DataType is an integer type""" return (datatype in (DataType.Int8, DataType.UInt8, DataType.Int16, DataType.UInt16, DataType.Int32, DataType.UInt32, DataType.Int64, DataType.UInt64)) -def _get_min_max_limits_per_int_datatype(datatype): +def _get_min_max_limits_per_int_datatype(datatype: DataType) -> Tuple[int, int]: """Return a tuple with "allowable" (min, max) range for a given integer SparkplugB DataType""" # I could not find these constant limits in Python ... # It's not in ctypes or anywhere else AFAIK! @@ -144,7 +149,7 @@ def _get_min_max_limits_per_int_datatype(datatype): } return MIN_MAX_LIMITS_PER_INTEGER_DATATYPE[datatype] -def timestamp_to_sparkplug(utc_seconds=None): +def timestamp_to_sparkplug(utc_seconds: Optional[float] = None) -> int: """ Convert a timestamp to SparkplugB DateTime value @@ -162,7 +167,7 @@ def timestamp_to_sparkplug(utc_seconds=None): utc_seconds = time.clock_gettime(time.CLOCK_REALTIME) return int(utc_seconds * 1000) -def timestamp_from_sparkplug(sparkplug_time): +def timestamp_from_sparkplug(sparkplug_time: float) -> float: """ Convert a SparkplugB DateTime value to a timestamp @@ -175,7 +180,9 @@ def timestamp_from_sparkplug(sparkplug_time): """ return (float(sparkplug_time) / 1000.0) -def value_to_sparkplug(container, datatype, value, u32_in_long=False): +SparkplugValueContainer = Union[sparkplug_b_pb2.Payload.Template.Parameter, sparkplug_b_pb2.Payload.DataSet.DataSetValue, sparkplug_b_pb2.Payload.PropertyValue, sparkplug_b_pb2.Payload.Metric] + +def value_to_sparkplug(container: 
SparkplugValueContainer, datatype: DataType, value: Any, u32_in_long: bool = False) -> None: """ Help pass a value into a payload container in preparation of protobuf packing @@ -214,16 +221,16 @@ def value_to_sparkplug(container, datatype, value, u32_in_long=False): container.boolean_value = value elif datatype in [DataType.String, DataType.Text, DataType.UUID]: container.string_value = value - elif datatype in [DataType.Bytes, DataType.File]: + elif isinstance(container, sparkplug_b_pb2.Payload.Metric) and (datatype in [DataType.Bytes, DataType.File]): container.bytes_value = value - elif datatype == DataType.Template: + elif isinstance(container, sparkplug_b_pb2.Payload.Metric) and (datatype == DataType.Template): value.to_sparkplug_template(container.template_value, u32_in_long) - elif datatype == DataType.DataSet: + elif isinstance(container, sparkplug_b_pb2.Payload.Metric) and (datatype == DataType.DataSet): value.to_sparkplug_dataset(container.dataset_value, u32_in_long) else: - raise ValueError('Unhandled datatype={} in value_to_sparkplug'.format(datatype)) + raise ValueError(f'Unhandled datatype={datatype} for container={type(container)} in value_to_sparkplug') -def value_from_sparkplug(container, datatype): +def value_from_sparkplug(container: SparkplugValueContainer, datatype: DataType) -> Any: """ Help read a value out of a payload container after protobuf unpacking @@ -244,18 +251,14 @@ def value_from_sparkplug(container, datatype): # implementations out there that might use the wrong value field. # We clamp values on any incoming integers larger than the datatype supports. 
# Tests well against Ignition 8.1.1 - try: - has_null = container.HasField('is_null') - except ValueError: - has_null = False - if has_null and container.is_null: - return None + if isinstance(container, sparkplug_b_pb2.Payload.PropertyValue) or isinstance(container, sparkplug_b_pb2.Payload.Metric): + if container.HasField('is_null') and container.is_null: + return None value_field = container.WhichOneof('value') if value_field is None: raise SparkplugDecodeError('No value field present') if value_field not in _get_usable_value_fields_for_datatype(datatype): - raise SparkplugDecodeError('Unexpected value field {} for datatype {}'.format(value_field, - datatype)) + raise SparkplugDecodeError(f'Unexpected value field {value_field} for datatype {datatype}') value = getattr(container, value_field) if _is_int_datatype(datatype): value_min, value_max = _get_min_max_limits_per_int_datatype(datatype) @@ -273,20 +276,23 @@ def value_from_sparkplug(container, datatype): value = value_min elif value > value_max: value = value_max - if datatype == DataType.Template: - return Template.from_sparkplug_template(value) if datatype == DataType.DataSet: return DataSet.from_sparkplug_dataset(value) pytype = _get_type_from_datatype(datatype) if pytype is not None: return pytype(value) - raise SparkplugDecodeError('Unhandled datatype={} in value_from_sparkplug'.format(datatype)) - -def mqtt_params(server, port=None, - username=None, password=None, - client_id=None, keepalive=60, - tls_enabled=False, ca_certs=None, certfile=None, - keyfile=None): + raise SparkplugDecodeError(f'Unhandled datatype={datatype} in value_from_sparkplug') + +def mqtt_params(server: str, + port: Optional[int] = None, + username: Optional[str] = None, + password: Optional[str] = None, + client_id: Optional[str] = None, + keepalive: int = 60, + tls_enabled: bool = False, + ca_certs: Optional[str] = None, + certfile: Optional[str] = None, + keyfile: Optional[str] = None) -> Dict[str, Any]: """ Collect all setup 
parameters for a single MQTT connection into a object to be used when initializing a Node @@ -304,7 +310,7 @@ def mqtt_params(server, port=None, :param certfile: strings pointing to the PEM encoded client certificate (optional, defaults to None) :param keyfile: strings pointing to the PEM encoded client private keys (optional, defaults to None) """ - mqtt_params = {} + mqtt_params: Dict[str, Any] = {} mqtt_params['client_id'] = client_id mqtt_params['server'] = server mqtt_params['port'] = port if port else (8883 if tls_enabled else 1883) @@ -325,16 +331,19 @@ class DataSet(object): # TODO - Add methods to allow easy value access by indices, e.g. with DataSet D you could just reference D[0][0] or D[0][column_name] - def __init__(self, name_datatype_tuples): + def __init__(self, name_datatype_tuples: Dict[str, int]) -> None: self._num_columns = len(name_datatype_tuples) if self._num_columns == 0: raise ValueError('dataset must have at least one column') self._column_names = [str(n) for n in name_datatype_tuples.keys()] self._column_datatypes = [DataType(d) for d in name_datatype_tuples.values()] - self._data = [] + self._data: List[List] = [] - def add_rows(self, data, keyed=False, in_columns=False, - insert_index=None): + def add_rows(self, + data: Union[List, Dict], + keyed: bool = False, + in_columns: bool = False, + insert_index: Optional[int] = None) -> None: """ Add rows to an existing DataSet object @@ -369,7 +378,7 @@ def add_rows(self, data, keyed=False, in_columns=False, if ((data is None) or (len(data) == 0)): return new_data = [] - col_keys = self._columns_names if keyed else range(self._num_columns) + col_keys = self._column_names if keyed else range(self._num_columns) col_python_types = [_get_type_from_datatype(self._column_datatypes[x]) for x in range(self._num_columns)] col_helper = tuple(zip(col_keys, col_python_types)) if not in_columns: @@ -379,14 +388,14 @@ def add_rows(self, data, keyed=False, in_columns=False, new_row.append(t(row[k])) 
new_data.append(new_row) else: - num_rows = len(data[col_keys[0]]) + num_rows = len(data[col_keys[0]]) # type: ignore for k in col_keys[1:]: - if len(data[k]) != num_rows: - raise ValueError('data does not have {} rows in all columns'.format(num_rows)) + if len(data[k]) != num_rows: # type: ignore + raise ValueError(f'data does not have {num_rows} rows in all columns') for row_index in range(num_rows): new_row = [] for k,t in col_helper: - new_row.append(t(data[k][row_index])) + new_row.append(t(data[k][row_index])) # type: ignore new_data.append(new_row) if insert_index: # This is a neat Python trick. @@ -397,15 +406,18 @@ def add_rows(self, data, keyed=False, in_columns=False, else: self._data.extend(new_data) - def get_num_columns(self): + def get_num_columns(self) -> int: """Return the number of columns in the DataSet""" return self._num_columns - def get_num_rows(self): + def get_num_rows(self) -> int: """Return the number of rows in the DataSet""" return len(self._data) - def remove_rows(self, start_index=0, end_index=None, num_rows=None): + def remove_rows(self, + start_index: int = 0, + end_index: Optional[int] = None, + num_rows: Optional[int] = None) -> None: """ Remove a contiguous set of rows from the DataSet @@ -420,8 +432,12 @@ def remove_rows(self, start_index=0, end_index=None, num_rows=None): + num_rows) if num_rows else len(self._data) self._data[start_index:end_index] = [] - def get_rows(self, start_index=0, end_index=None, num_rows=None, - in_columns=False, keyed=False): + def get_rows(self, + start_index: int = 0, + end_index: Optional[int] = None, + num_rows: Optional[int] = None, + in_columns: bool = False, + keyed: bool = False) -> Union[List, Dict]: """ Returns a copy of the data from one or more rows in the DataSet @@ -445,18 +461,16 @@ def get_rows(self, start_index=0, end_index=None, num_rows=None, row)) for row in self._data[start_index:end_index]] return self._data[start_index:end_index] if not keyed: - data = [] + listdata = [] for k 
in range(self._num_columns): - data.append([self._data[r][k] for r in range(start_index, - end_index)]) - return data - data = {} + listdata.append([self._data[r][k] for r in range(start_index, end_index)]) + return listdata + dictdata = {} for k in range(len(self._column_names)): - data[self._column_names[k]] = [self._data[r][k] for r in range(start_index, - end_index)] - return data + dictdata[self._column_names[k]] = [self._data[r][k] for r in range(start_index, end_index)] + return dictdata - def to_sparkplug_dataset(self, sp_dataset, u32_in_long=False): + def to_sparkplug_dataset(self, sp_dataset: sparkplug_b_pb2.Payload.DataSet, u32_in_long: bool = False) -> sparkplug_b_pb2.Payload.DataSet: """ Copy the DataSet into a SparkplugB Payload.DataSet @@ -475,7 +489,7 @@ def to_sparkplug_dataset(self, sp_dataset, u32_in_long=False): return sp_dataset @classmethod - def from_sparkplug_dataset(cls, sp_dataset): + def from_sparkplug_dataset(cls, sp_dataset: sparkplug_b_pb2.Payload.DataSet) -> DataSet: """ Create a new DataSet object from a SparkplugB Payload.DataSet diff --git a/python/core/tahu/edge.py b/python/core/tahu/edge.py index 42055001..6cba17fa 100644 --- a/python/core/tahu/edge.py +++ b/python/core/tahu/edge.py @@ -22,8 +22,9 @@ import paho.mqtt.client as mqtt import tahu from . import sparkplug_b_pb2 +from typing import * -def _rebirth_command_handler(tag, context, value): +def _rebirth_command_handler(tag: Metric, context: Any, value: Any) -> None: """ Metric command handler for "Node Control/Rebirth" @@ -41,7 +42,7 @@ def _rebirth_command_handler(tag, context, value): # We don't care what value the server wrote to the tag, any write is considered a trigger. 
tag._parent_device._needs_to_send_birth = True -def _next_server_command_handler(tag, context, value): +def _next_server_command_handler(tag: Metric, context: Any, value: Any) -> None: """ Metric command handler for "Node Control/Next Server" @@ -70,8 +71,8 @@ class Metric(object): The change_value is used to report new values over Sparkplug, and the cmd_handler provided when created will be called if a new value is received from Sparkplug. """ - def __init__(self, parent_device, name, datatype=None, value=None, - cmd_handler=None, cmd_context=None): + def __init__(self, parent_device: Union[Node, Device], name: Text, datatype: Optional[tahu.DataType] = None, value: Optional[Any] = None, + cmd_handler: Optional[Callable[[Metric, Optional[Any], Any], None]] = None, cmd_context: Optional[Any] = None) -> None: """ Initialize a Metric object @@ -95,18 +96,15 @@ def __init__(self, parent_device, name, datatype=None, value=None, self._datatype = tahu.DataType(datatype) else: self._datatype = tahu._get_datatype_from_type(type(value)) - if self._datatype is None: - raise ValueError('Need explicit datatype for Python type {}'.format(type(value))) - self._value = value self._last_received = None self._last_sent = None self._cmd_handler = cmd_handler self._cmd_context = cmd_context - self._properties = [] + self._properties: List[MetricProperty] = [] self.alias = parent_device._attach_tag(self) - def _attach_property(self, property): + def _attach_property(self, property: MetricProperty) -> int: """ Attach a Sparkplug property object to this metric @@ -120,7 +118,7 @@ def _attach_property(self, property): # TODO - Add checking/handling depending if we're connected return next_index - def _fill_in_payload_metric(self, new_metric, birth=False): + def _fill_in_payload_metric(self, new_metric: sparkplug_b_pb2.Payload.Metric, birth: bool = False) -> None: """ Fill in the Metric message object provided with the metrics most recent values @@ -140,7 +138,7 @@ def 
_fill_in_payload_metric(self, new_metric, birth=False): if birth or p._report_with_data: new_metric.properties.keys.append(p._name) pvalue = new_metric.properties.values.add() - pvalue.type = p._datatype + pvalue.type = tahu.DataType(p._datatype) tahu.value_to_sparkplug(pvalue, pvalue.type, p._value, self._u32_in_long) @@ -154,7 +152,7 @@ def _fill_in_payload_metric(self, new_metric, birth=False): self._u32_in_long) self._last_sent = self._value - def change_value(self, value, send_immediate=True): + def change_value(self, value: Any, send_immediate: bool = True) -> int: """ Update the known value of the metric and optionally cause a payload to be sent immediately @@ -169,7 +167,7 @@ def change_value(self, value, send_immediate=True): self._parent_device.send_data([self.alias]) return self.alias - def _handle_sparkplug_command(self, Metric): + def _handle_sparkplug_command(self, Metric: sparkplug_b_pb2.Payload.Metric) -> None: """ Stub for handling received metrics and calling out to cmd_handler hooks as needed @@ -178,21 +176,18 @@ def _handle_sparkplug_command(self, Metric): """ # Note that we enforce OUR expected datatype on the value as we pull it from the metric try: - value = tahu.value_from_sparkplug(Metric, - self._datatype) + value = tahu.value_from_sparkplug(Metric, self._datatype) except tahu.SparkplugDecodeError as errmsg: - self._logger.warning('Sparkplug decode error for tag {}: {}'.format(self.name, - errmsg)) + self._logger.warning('Sparkplug decode error for tag {}: {}'.format(self.name, errmsg)) return - self._logger.debug('Command received for tag {} = {}'.format(self.name, - value)) + self._logger.debug('Command received for tag {} = {}'.format(self.name, value)) if self._cmd_handler: self._cmd_handler(self, self._cmd_context, value) else: self._logger.info('Received command for tag {} with no handler. 
No action taken.'.format(self.name)) self._last_received = value - def changed_since_last_sent(self): + def changed_since_last_sent(self) -> bool: """If the metric value or any of the dynamic properties have changed since the most recent publish, returns true""" for p in self._properties: if p._report_with_data and p.changed_since_last_sent(): @@ -204,8 +199,8 @@ class MetricProperty(object): """ The MetricProperty object manages all aspects of a single metric property """ - def __init__(self, parent_metric, name, datatype, value, - report_with_data=False): + def __init__(self, parent_metric: Metric, name: Text, datatype: Optional[tahu.DataType], value: Optional[Any], + report_with_data: bool = False) -> None: """ Initialize a MetricProperty object @@ -230,11 +225,11 @@ def __init__(self, parent_metric, name, datatype, value, self._last_sent = None self._parent_metric._attach_property(self) - def changed_since_last_sent(self): + def changed_since_last_sent(self) -> bool: """If the preoprty value has changed since the most recent publish, returns true""" return (self._value != self._last_sent) - def change_value(self, value, send_immediate=False): + def change_value(self, value: Any, send_immediate: bool = False) -> int: """ Update the value of the property and optionally cause a payload to be sent immediately @@ -251,7 +246,7 @@ def change_value(self, value, send_immediate=False): return self._parent_metric.alias -def bulk_properties(parent_metric, property_dict): +def bulk_properties(parent_metric: Metric, property_dict: Dict[str, Any]) -> List[MetricProperty]: """ Create multiple property objects and attach them all to the same metric quickly and easily @@ -273,20 +268,21 @@ class _AbstractBaseDevice(object): The _AbstractBaseDevice should not be instantiated directly. Use Node or Device instead. 
""" - def __init__(self): + def __init__(self) -> None: self._mqtt_client = None - self._tags = [] + self._tags: List[Metric] = [] self._needs_to_send_birth = True + self._logger = logging.getLogger('_AbstractBaseDevice') - def get_tag_names(self): + def get_tag_names(self) -> List[str]: """Return a list of the names of all metrics on this device""" return [m.name for m in self._tags] - def _get_next_seq(self): + def _get_next_seq(self) -> int: """Returns the Sparkplug `seq` number to use on the next publish""" raise NotImplementedError('_get_next_seq not implemented on this class') - def _attach_tag(self, tag): + def _attach_tag(self, tag: Metric) -> int: """ Attach a Metric object to this device @@ -304,7 +300,7 @@ def _attach_tag(self, tag): # TODO - Add another function to remove a tag - def _get_payload(self, alias_list, birth): + def _get_payload(self, alias_list: Iterable[int], birth: bool) -> sparkplug_b_pb2.Payload: """ Create and return a Sparkplug Payload message for this device and the given metric aliases @@ -324,7 +320,7 @@ def _get_payload(self, alias_list, birth): self._tags[m]._fill_in_payload_metric(new_metric, birth=birth) return tx_payload - def _get_topic(self, cmd_type): + def _get_topic(self, cmd_type: str) -> str: """ Return the topic string to use for a command of the type given on this device object @@ -335,7 +331,7 @@ def _get_topic(self, cmd_type): """ raise NotImplementedError('_get_topic not implemented on this class') - def send_birth(self): + def send_birth(self) -> Any: """ Generate and send a birth message for this device. @@ -346,7 +342,7 @@ def send_birth(self): """ raise NotImplementedError('send_birth not implemented on this class') - def send_death(self): + def send_death(self) -> Any: """ Generate and send a death message for this device. 
@@ -357,7 +353,7 @@ def send_death(self): """ raise NotImplementedError('send_death not implemented on this class') - def send_data(self, aliases=None, changed_only=False): + def send_data(self, aliases: Optional[List[int]] = None, changed_only: bool = False) -> Any: """ Generate and send a data message for this device. @@ -367,13 +363,16 @@ def send_data(self, aliases=None, changed_only=False): :param changed_only: whether to filter the metrics to only include those that have changed since the prior publish (Default value = False) """ + if self._mqtt_client is None: + raise RunTimeError('Trying to send data without an MQTT client connection') + return if not self.is_connected(): self._logger.warning('Trying to send data when not connected. Skipping.') return if self._needs_to_send_birth: return self.send_birth() if aliases is None: - aliases = range(len(self._tags)) + aliases = list(range(len(self._tags))) if changed_only: aliases = [x for x in aliases if self._tags[x].changed_since_last_sent()] if len(aliases) == 0: @@ -384,11 +383,11 @@ def send_data(self, aliases=None, changed_only=False): return self._mqtt_client.publish(topic, tx_payload.SerializeToString()) - def get_watched_topic(self): + def get_watched_topic(self) -> str: """Return the MQTT topic string on which this device expects to receive messages""" return self._get_topic('CMD') - def _handle_payload(self, topic, payload): + def _handle_payload(self, topic: str, payload: sparkplug_b_pb2.Payload) -> bool: """ Handle a received Sparkplug payload diff --git a/python/core/tahu/sparkplug_b_pb2.pyi b/python/core/tahu/sparkplug_b_pb2.pyi new file mode 100644 index 00000000..a64b2465 --- /dev/null +++ b/python/core/tahu/sparkplug_b_pb2.pyi @@ -0,0 +1,521 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import typing +import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _DataType: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType +class _DataTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DataType.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + Unknown: _DataType.ValueType # 0 + """Unknown placeholder for future expansion.""" + + Int8: _DataType.ValueType # 1 + """Basic Types""" + + Int16: _DataType.ValueType # 2 + Int32: _DataType.ValueType # 3 + Int64: _DataType.ValueType # 4 + UInt8: _DataType.ValueType # 5 + UInt16: _DataType.ValueType # 6 + UInt32: _DataType.ValueType # 7 + UInt64: _DataType.ValueType # 8 + Float: _DataType.ValueType # 9 + Double: _DataType.ValueType # 10 + Boolean: _DataType.ValueType # 11 + String: _DataType.ValueType # 12 + DateTime: _DataType.ValueType # 13 + Text: _DataType.ValueType # 14 + UUID: _DataType.ValueType # 15 + """Additional Metric Types""" + + DataSet: _DataType.ValueType # 16 + Bytes: _DataType.ValueType # 17 + File: _DataType.ValueType # 18 + Template: _DataType.ValueType # 19 + PropertySet: _DataType.ValueType # 20 + """Additional PropertyValue Types""" + + PropertySetList: _DataType.ValueType # 21 + Int8Array: _DataType.ValueType # 22 + """Array Types""" + + Int16Array: _DataType.ValueType # 23 + Int32Array: _DataType.ValueType # 24 + Int64Array: _DataType.ValueType # 25 + UInt8Array: _DataType.ValueType # 26 + UInt16Array: _DataType.ValueType # 27 + UInt32Array: _DataType.ValueType # 28 + UInt64Array: _DataType.ValueType # 29 + FloatArray: _DataType.ValueType # 30 + DoubleArray: _DataType.ValueType # 31 + BooleanArray: _DataType.ValueType # 32 + StringArray: 
_DataType.ValueType # 33 + DateTimeArray: _DataType.ValueType # 34 +class DataType(_DataType, metaclass=_DataTypeEnumTypeWrapper): + """Indexes of Data Types""" + pass + +Unknown: DataType.ValueType # 0 +"""Unknown placeholder for future expansion.""" + +Int8: DataType.ValueType # 1 +"""Basic Types""" + +Int16: DataType.ValueType # 2 +Int32: DataType.ValueType # 3 +Int64: DataType.ValueType # 4 +UInt8: DataType.ValueType # 5 +UInt16: DataType.ValueType # 6 +UInt32: DataType.ValueType # 7 +UInt64: DataType.ValueType # 8 +Float: DataType.ValueType # 9 +Double: DataType.ValueType # 10 +Boolean: DataType.ValueType # 11 +String: DataType.ValueType # 12 +DateTime: DataType.ValueType # 13 +Text: DataType.ValueType # 14 +UUID: DataType.ValueType # 15 +"""Additional Metric Types""" + +DataSet: DataType.ValueType # 16 +Bytes: DataType.ValueType # 17 +File: DataType.ValueType # 18 +Template: DataType.ValueType # 19 +PropertySet: DataType.ValueType # 20 +"""Additional PropertyValue Types""" + +PropertySetList: DataType.ValueType # 21 +Int8Array: DataType.ValueType # 22 +"""Array Types""" + +Int16Array: DataType.ValueType # 23 +Int32Array: DataType.ValueType # 24 +Int64Array: DataType.ValueType # 25 +UInt8Array: DataType.ValueType # 26 +UInt16Array: DataType.ValueType # 27 +UInt32Array: DataType.ValueType # 28 +UInt64Array: DataType.ValueType # 29 +FloatArray: DataType.ValueType # 30 +DoubleArray: DataType.ValueType # 31 +BooleanArray: DataType.ValueType # 32 +StringArray: DataType.ValueType # 33 +DateTimeArray: DataType.ValueType # 34 +global___DataType = DataType + + +class Payload(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class Template(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class Parameter(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class ParameterValueExtension(google.protobuf.message.Message): + DESCRIPTOR: 
google.protobuf.descriptor.Descriptor + def __init__(self, + ) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + INT_VALUE_FIELD_NUMBER: builtins.int + LONG_VALUE_FIELD_NUMBER: builtins.int + FLOAT_VALUE_FIELD_NUMBER: builtins.int + DOUBLE_VALUE_FIELD_NUMBER: builtins.int + BOOLEAN_VALUE_FIELD_NUMBER: builtins.int + STRING_VALUE_FIELD_NUMBER: builtins.int + EXTENSION_VALUE_FIELD_NUMBER: builtins.int + name: typing.Text + type: builtins.int + int_value: builtins.int + long_value: builtins.int + float_value: builtins.float + double_value: builtins.float + boolean_value: builtins.bool + string_value: typing.Text + @property + def extension_value(self) -> global___Payload.Template.Parameter.ParameterValueExtension: ... + def __init__(self, + *, + name: typing.Optional[typing.Text] = ..., + type: typing.Optional[builtins.int] = ..., + int_value: typing.Optional[builtins.int] = ..., + long_value: typing.Optional[builtins.int] = ..., + float_value: typing.Optional[builtins.float] = ..., + double_value: typing.Optional[builtins.float] = ..., + boolean_value: typing.Optional[builtins.bool] = ..., + string_value: typing.Optional[typing.Text] = ..., + extension_value: typing.Optional[global___Payload.Template.Parameter.ParameterValueExtension] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["boolean_value",b"boolean_value","double_value",b"double_value","extension_value",b"extension_value","float_value",b"float_value","int_value",b"int_value","long_value",b"long_value","name",b"name","string_value",b"string_value","type",b"type","value",b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["boolean_value",b"boolean_value","double_value",b"double_value","extension_value",b"extension_value","float_value",b"float_value","int_value",b"int_value","long_value",b"long_value","name",b"name","string_value",b"string_value","type",b"type","value",b"value"]) -> None: ... 
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["value",b"value"]) -> typing.Optional[typing_extensions.Literal["int_value","long_value","float_value","double_value","boolean_value","string_value","extension_value"]]: ... + + VERSION_FIELD_NUMBER: builtins.int + METRICS_FIELD_NUMBER: builtins.int + PARAMETERS_FIELD_NUMBER: builtins.int + TEMPLATE_REF_FIELD_NUMBER: builtins.int + IS_DEFINITION_FIELD_NUMBER: builtins.int + version: typing.Text + """The version of the Template to prevent mismatches""" + + @property + def metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Payload.Metric]: + """Each metric includes a name, datatype, and optionally a value""" + pass + @property + def parameters(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Payload.Template.Parameter]: ... + template_ref: typing.Text + """MUST be a reference to a template definition if this is an instance (i.e. the name of the template definition) - MUST be omitted for template definitions""" + + is_definition: builtins.bool + def __init__(self, + *, + version: typing.Optional[typing.Text] = ..., + metrics: typing.Optional[typing.Iterable[global___Payload.Metric]] = ..., + parameters: typing.Optional[typing.Iterable[global___Payload.Template.Parameter]] = ..., + template_ref: typing.Optional[typing.Text] = ..., + is_definition: typing.Optional[builtins.bool] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["is_definition",b"is_definition","template_ref",b"template_ref","version",b"version"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["is_definition",b"is_definition","metrics",b"metrics","parameters",b"parameters","template_ref",b"template_ref","version",b"version"]) -> None: ... 
+ + class DataSet(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class DataSetValue(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class DataSetValueExtension(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + def __init__(self, + ) -> None: ... + + INT_VALUE_FIELD_NUMBER: builtins.int + LONG_VALUE_FIELD_NUMBER: builtins.int + FLOAT_VALUE_FIELD_NUMBER: builtins.int + DOUBLE_VALUE_FIELD_NUMBER: builtins.int + BOOLEAN_VALUE_FIELD_NUMBER: builtins.int + STRING_VALUE_FIELD_NUMBER: builtins.int + EXTENSION_VALUE_FIELD_NUMBER: builtins.int + int_value: builtins.int + long_value: builtins.int + float_value: builtins.float + double_value: builtins.float + boolean_value: builtins.bool + string_value: typing.Text + @property + def extension_value(self) -> global___Payload.DataSet.DataSetValue.DataSetValueExtension: ... + def __init__(self, + *, + int_value: typing.Optional[builtins.int] = ..., + long_value: typing.Optional[builtins.int] = ..., + float_value: typing.Optional[builtins.float] = ..., + double_value: typing.Optional[builtins.float] = ..., + boolean_value: typing.Optional[builtins.bool] = ..., + string_value: typing.Optional[typing.Text] = ..., + extension_value: typing.Optional[global___Payload.DataSet.DataSetValue.DataSetValueExtension] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["boolean_value",b"boolean_value","double_value",b"double_value","extension_value",b"extension_value","float_value",b"float_value","int_value",b"int_value","long_value",b"long_value","string_value",b"string_value","value",b"value"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["boolean_value",b"boolean_value","double_value",b"double_value","extension_value",b"extension_value","float_value",b"float_value","int_value",b"int_value","long_value",b"long_value","string_value",b"string_value","value",b"value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["value",b"value"]) -> typing.Optional[typing_extensions.Literal["int_value","long_value","float_value","double_value","boolean_value","string_value","extension_value"]]: ... + + class Row(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + ELEMENTS_FIELD_NUMBER: builtins.int + @property + def elements(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Payload.DataSet.DataSetValue]: ... + def __init__(self, + *, + elements: typing.Optional[typing.Iterable[global___Payload.DataSet.DataSetValue]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["elements",b"elements"]) -> None: ... + + NUM_OF_COLUMNS_FIELD_NUMBER: builtins.int + COLUMNS_FIELD_NUMBER: builtins.int + TYPES_FIELD_NUMBER: builtins.int + ROWS_FIELD_NUMBER: builtins.int + num_of_columns: builtins.int + @property + def columns(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: ... + @property + def types(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def rows(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Payload.DataSet.Row]: ... + def __init__(self, + *, + num_of_columns: typing.Optional[builtins.int] = ..., + columns: typing.Optional[typing.Iterable[typing.Text]] = ..., + types: typing.Optional[typing.Iterable[builtins.int]] = ..., + rows: typing.Optional[typing.Iterable[global___Payload.DataSet.Row]] = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["num_of_columns",b"num_of_columns"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["columns",b"columns","num_of_columns",b"num_of_columns","rows",b"rows","types",b"types"]) -> None: ... + + class PropertyValue(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class PropertyValueExtension(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + def __init__(self, + ) -> None: ... + + TYPE_FIELD_NUMBER: builtins.int + IS_NULL_FIELD_NUMBER: builtins.int + INT_VALUE_FIELD_NUMBER: builtins.int + LONG_VALUE_FIELD_NUMBER: builtins.int + FLOAT_VALUE_FIELD_NUMBER: builtins.int + DOUBLE_VALUE_FIELD_NUMBER: builtins.int + BOOLEAN_VALUE_FIELD_NUMBER: builtins.int + STRING_VALUE_FIELD_NUMBER: builtins.int + PROPERTYSET_VALUE_FIELD_NUMBER: builtins.int + PROPERTYSETS_VALUE_FIELD_NUMBER: builtins.int + EXTENSION_VALUE_FIELD_NUMBER: builtins.int + type: builtins.int + is_null: builtins.bool + int_value: builtins.int + long_value: builtins.int + float_value: builtins.float + double_value: builtins.float + boolean_value: builtins.bool + string_value: typing.Text + @property + def propertyset_value(self) -> global___Payload.PropertySet: ... + @property + def propertysets_value(self) -> global___Payload.PropertySetList: + """List of Property Values""" + pass + @property + def extension_value(self) -> global___Payload.PropertyValue.PropertyValueExtension: ... 
+ def __init__(self, + *, + type: typing.Optional[builtins.int] = ..., + is_null: typing.Optional[builtins.bool] = ..., + int_value: typing.Optional[builtins.int] = ..., + long_value: typing.Optional[builtins.int] = ..., + float_value: typing.Optional[builtins.float] = ..., + double_value: typing.Optional[builtins.float] = ..., + boolean_value: typing.Optional[builtins.bool] = ..., + string_value: typing.Optional[typing.Text] = ..., + propertyset_value: typing.Optional[global___Payload.PropertySet] = ..., + propertysets_value: typing.Optional[global___Payload.PropertySetList] = ..., + extension_value: typing.Optional[global___Payload.PropertyValue.PropertyValueExtension] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["boolean_value",b"boolean_value","double_value",b"double_value","extension_value",b"extension_value","float_value",b"float_value","int_value",b"int_value","is_null",b"is_null","long_value",b"long_value","propertyset_value",b"propertyset_value","propertysets_value",b"propertysets_value","string_value",b"string_value","type",b"type","value",b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["boolean_value",b"boolean_value","double_value",b"double_value","extension_value",b"extension_value","float_value",b"float_value","int_value",b"int_value","is_null",b"is_null","long_value",b"long_value","propertyset_value",b"propertyset_value","propertysets_value",b"propertysets_value","string_value",b"string_value","type",b"type","value",b"value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["value",b"value"]) -> typing.Optional[typing_extensions.Literal["int_value","long_value","float_value","double_value","boolean_value","string_value","propertyset_value","propertysets_value","extension_value"]]: ... 
+ + class PropertySet(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + KEYS_FIELD_NUMBER: builtins.int + VALUES_FIELD_NUMBER: builtins.int + @property + def keys(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: + """Names of the properties""" + pass + @property + def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Payload.PropertyValue]: ... + def __init__(self, + *, + keys: typing.Optional[typing.Iterable[typing.Text]] = ..., + values: typing.Optional[typing.Iterable[global___Payload.PropertyValue]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["keys",b"keys","values",b"values"]) -> None: ... + + class PropertySetList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + PROPERTYSET_FIELD_NUMBER: builtins.int + @property + def propertyset(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Payload.PropertySet]: ... + def __init__(self, + *, + propertyset: typing.Optional[typing.Iterable[global___Payload.PropertySet]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["propertyset",b"propertyset"]) -> None: ... 
+ + class MetaData(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + IS_MULTI_PART_FIELD_NUMBER: builtins.int + CONTENT_TYPE_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + SEQ_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + FILE_TYPE_FIELD_NUMBER: builtins.int + MD5_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + is_multi_part: builtins.bool + """Bytes specific metadata""" + + content_type: typing.Text + """General metadata + Content/Media type + """ + + size: builtins.int + """File size, String size, Multi-part size, etc""" + + seq: builtins.int + """Sequence number for multi-part messages""" + + file_name: typing.Text + """File metadata + File name + """ + + file_type: typing.Text + """File type (i.e. xml, json, txt, cpp, etc)""" + + md5: typing.Text + """md5 of data""" + + description: typing.Text + """Catchalls and future expansion + Could be anything such as json or xml of custom properties + """ + + def __init__(self, + *, + is_multi_part: typing.Optional[builtins.bool] = ..., + content_type: typing.Optional[typing.Text] = ..., + size: typing.Optional[builtins.int] = ..., + seq: typing.Optional[builtins.int] = ..., + file_name: typing.Optional[typing.Text] = ..., + file_type: typing.Optional[typing.Text] = ..., + md5: typing.Optional[typing.Text] = ..., + description: typing.Optional[typing.Text] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["content_type",b"content_type","description",b"description","file_name",b"file_name","file_type",b"file_type","is_multi_part",b"is_multi_part","md5",b"md5","seq",b"seq","size",b"size"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["content_type",b"content_type","description",b"description","file_name",b"file_name","file_type",b"file_type","is_multi_part",b"is_multi_part","md5",b"md5","seq",b"seq","size",b"size"]) -> None: ... 
+ + class Metric(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class MetricValueExtension(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + def __init__(self, + ) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + ALIAS_FIELD_NUMBER: builtins.int + TIMESTAMP_FIELD_NUMBER: builtins.int + DATATYPE_FIELD_NUMBER: builtins.int + IS_HISTORICAL_FIELD_NUMBER: builtins.int + IS_TRANSIENT_FIELD_NUMBER: builtins.int + IS_NULL_FIELD_NUMBER: builtins.int + METADATA_FIELD_NUMBER: builtins.int + PROPERTIES_FIELD_NUMBER: builtins.int + INT_VALUE_FIELD_NUMBER: builtins.int + LONG_VALUE_FIELD_NUMBER: builtins.int + FLOAT_VALUE_FIELD_NUMBER: builtins.int + DOUBLE_VALUE_FIELD_NUMBER: builtins.int + BOOLEAN_VALUE_FIELD_NUMBER: builtins.int + STRING_VALUE_FIELD_NUMBER: builtins.int + BYTES_VALUE_FIELD_NUMBER: builtins.int + DATASET_VALUE_FIELD_NUMBER: builtins.int + TEMPLATE_VALUE_FIELD_NUMBER: builtins.int + EXTENSION_VALUE_FIELD_NUMBER: builtins.int + name: typing.Text + """Metric name - should only be included on birth""" + + alias: builtins.int + """Metric alias - tied to name on birth and included in all later DATA messages""" + + timestamp: builtins.int + """Timestamp associated with data acquisition time""" + + datatype: builtins.int + """DataType of the metric/tag value""" + + is_historical: builtins.bool + """If this is historical data and should not update real time tag""" + + is_transient: builtins.bool + """Tells consuming clients such as MQTT Engine to not store this as a tag""" + + is_null: builtins.bool + """If this is null - explicitly say so rather than using -1, false, etc for some datatypes.""" + + @property + def metadata(self) -> global___Payload.MetaData: + """Metadata for the payload""" + pass + @property + def properties(self) -> global___Payload.PropertySet: ... 
+ int_value: builtins.int + long_value: builtins.int + float_value: builtins.float + double_value: builtins.float + boolean_value: builtins.bool + string_value: typing.Text + bytes_value: builtins.bytes + """Bytes, File""" + + @property + def dataset_value(self) -> global___Payload.DataSet: ... + @property + def template_value(self) -> global___Payload.Template: ... + @property + def extension_value(self) -> global___Payload.Metric.MetricValueExtension: ... + def __init__(self, + *, + name: typing.Optional[typing.Text] = ..., + alias: typing.Optional[builtins.int] = ..., + timestamp: typing.Optional[builtins.int] = ..., + datatype: typing.Optional[builtins.int] = ..., + is_historical: typing.Optional[builtins.bool] = ..., + is_transient: typing.Optional[builtins.bool] = ..., + is_null: typing.Optional[builtins.bool] = ..., + metadata: typing.Optional[global___Payload.MetaData] = ..., + properties: typing.Optional[global___Payload.PropertySet] = ..., + int_value: typing.Optional[builtins.int] = ..., + long_value: typing.Optional[builtins.int] = ..., + float_value: typing.Optional[builtins.float] = ..., + double_value: typing.Optional[builtins.float] = ..., + boolean_value: typing.Optional[builtins.bool] = ..., + string_value: typing.Optional[typing.Text] = ..., + bytes_value: typing.Optional[builtins.bytes] = ..., + dataset_value: typing.Optional[global___Payload.DataSet] = ..., + template_value: typing.Optional[global___Payload.Template] = ..., + extension_value: typing.Optional[global___Payload.Metric.MetricValueExtension] = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["alias",b"alias","boolean_value",b"boolean_value","bytes_value",b"bytes_value","dataset_value",b"dataset_value","datatype",b"datatype","double_value",b"double_value","extension_value",b"extension_value","float_value",b"float_value","int_value",b"int_value","is_historical",b"is_historical","is_null",b"is_null","is_transient",b"is_transient","long_value",b"long_value","metadata",b"metadata","name",b"name","properties",b"properties","string_value",b"string_value","template_value",b"template_value","timestamp",b"timestamp","value",b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["alias",b"alias","boolean_value",b"boolean_value","bytes_value",b"bytes_value","dataset_value",b"dataset_value","datatype",b"datatype","double_value",b"double_value","extension_value",b"extension_value","float_value",b"float_value","int_value",b"int_value","is_historical",b"is_historical","is_null",b"is_null","is_transient",b"is_transient","long_value",b"long_value","metadata",b"metadata","name",b"name","properties",b"properties","string_value",b"string_value","template_value",b"template_value","timestamp",b"timestamp","value",b"value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["value",b"value"]) -> typing.Optional[typing_extensions.Literal["int_value","long_value","float_value","double_value","boolean_value","string_value","bytes_value","dataset_value","template_value","extension_value"]]: ... 
+ + TIMESTAMP_FIELD_NUMBER: builtins.int + METRICS_FIELD_NUMBER: builtins.int + SEQ_FIELD_NUMBER: builtins.int + UUID_FIELD_NUMBER: builtins.int + BODY_FIELD_NUMBER: builtins.int + timestamp: builtins.int + """Timestamp at message sending time""" + + @property + def metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Payload.Metric]: + """Repeated forever - no limit in Google Protobufs""" + pass + seq: builtins.int + """Sequence number""" + + uuid: typing.Text + """UUID to track message type in terms of schema definitions""" + + body: builtins.bytes + """To optionally bypass the whole definition above""" + + def __init__(self, + *, + timestamp: typing.Optional[builtins.int] = ..., + metrics: typing.Optional[typing.Iterable[global___Payload.Metric]] = ..., + seq: typing.Optional[builtins.int] = ..., + uuid: typing.Optional[typing.Text] = ..., + body: typing.Optional[builtins.bytes] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["body",b"body","seq",b"seq","timestamp",b"timestamp","uuid",b"uuid"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["body",b"body","metrics",b"metrics","seq",b"seq","timestamp",b"timestamp","uuid",b"uuid"]) -> None: ... +global___Payload = Payload